def test_print_progress_3():
    """Smoke test: fitting with print_progress=3 must not raise."""
    model = LogisticRegression(epochs=100,
                               eta=0.01,
                               minibatches=1,
                               print_progress=3,
                               random_seed=1)
    model.fit(X, y)
def test_logistic_regression_gd():
    """Gradient-descent fit should hit known weights and classify all samples."""
    expected_w = np.array([0.51, 1.18, 4.40])
    model = LogisticRegression(epochs=100, eta=0.01, learning='gd', random_seed=0)
    model.fit(X, y)  # 0, 1 class
    np.testing.assert_almost_equal(model.w_, expected_w, 2)
    assert (model.predict(X) == y).all()
# Example #3
def test_logistic_regression_gd():
    """GD training (seed 0) must reproduce reference weights and a perfect fit."""
    reference_weights = np.array([0.51, 1.18, 4.40])
    clf = LogisticRegression(epochs=100, eta=0.01, learning='gd', random_seed=0)
    clf.fit(X, y)  # 0, 1 class
    np.testing.assert_almost_equal(clf.w_, reference_weights, 2)
    predictions = clf.predict(X)
    assert (predictions == y).all()
# Example #4
def test_print_progress_3():
    """Verbosity level 3 should be accepted without error during fit()."""
    clf = LogisticRegression(epochs=100, eta=0.01, minibatches=1,
                             print_progress=3, random_seed=1)
    clf.fit(X, y)
# Example #5
def test_predict_proba():
    """predict_proba should return known class-1 probabilities for 3 reference rows."""
    model = LogisticRegression(epochs=100, eta=0.01, minibatches=1, random_seed=1)
    model.fit(X, y)
    sample_idx = [0, 48, 99]  # sample labels: 0, 0, 1
    probabilities = model.predict_proba(X[sample_idx])
    expected = np.array([0.009, 0.012, 0.993])
    np.testing.assert_almost_equal(probabilities, expected, 3)
# Example #6
def test_score_function():
    """score() must report perfect accuracy on the training data."""
    model = LogisticRegression(epochs=100,
                               eta=0.01,
                               minibatches=1,
                               random_seed=1)
    model.fit(X, y)
    accuracy = model.score(X, y)
    assert accuracy == 1.0, "Acc: %s" % accuracy
# Example #7
def test_ary_persistency_in_shuffling():
    """SGD minibatch shuffling must not mutate the caller's feature array."""
    snapshot = X.copy()
    model = LogisticRegression(eta=0.01,
                               epochs=100,
                               minibatches=len(y),
                               l2_lambda=1.0,
                               random_seed=1)
    model.fit(X, y)
    # X must still equal its pre-fit snapshot.
    np.testing.assert_almost_equal(snapshot, X, 6)
def test_logistic_regression_sgd():
    """SGD fit (seed 0) should reproduce reference weights and a perfect fit."""
    expected = np.array([0.50, 1.16, 4.38])
    model = LogisticRegression(epochs=100,
                               eta=0.01,
                               minibatches=len(y),
                               random_seed=0)
    model.fit(X, y)  # 0, 1 class
    np.testing.assert_almost_equal(model.w_, expected, 2)
    assert (model.predict(X) == y).all()
def test_logistic_regression_sgd():
    """SGD fit (seed 1) should reproduce reference weights and a perfect fit."""
    reference_weights = np.array([0.53, 1.2, 4.4])
    clf = LogisticRegression(epochs=100, eta=0.01,
                             minibatches=len(y), random_seed=1)
    clf.fit(X, y)  # 0, 1 class
    np.testing.assert_almost_equal(clf.w_, reference_weights, 2)
    predictions = clf.predict(X)
    assert (predictions == y).all()
def test_l2_regularization_gd():
    """L2-regularized GD should shrink weights to reference values, accuracy 1.0."""
    expected_weights = np.array([0.252, 1.186, 2.296])
    clf = LogisticRegression(eta=0.01, epochs=20,
                             learning='gd', regularization='l2',
                             lambda_=1.0, random_seed=0)
    clf.fit(X, y)
    np.testing.assert_almost_equal(clf.w_, expected_weights, 3)
    predictions = clf.predict(X)
    accuracy = sum(predictions == y) / len(y)
    assert accuracy == 1.0
def test_score_function():
    """Fitted weights must match reference values and score() must be 1.0."""
    expected_w = np.array([0.52, 1.2, 4.4])
    model = LogisticRegression(epochs=100, eta=0.01,
                               minibatches=1, random_seed=1)
    model.fit(X, y)  # 0, 1 class
    np.testing.assert_almost_equal(model.w_, expected_w, 2)
    accuracy = model.score(X, y)
    assert accuracy == 1.0, "Acc: %s" % accuracy
def test_predict_proba():
    """Class probabilities for three reference samples must match known values."""
    clf = LogisticRegression(epochs=100,
                             eta=0.01,
                             minibatches=1,
                             random_seed=1)
    clf.fit(X, y)
    rows = [0, 48, 99]  # sample labels: 0, 0, 1
    probas = clf.predict_proba(X[rows])
    np.testing.assert_almost_equal(probas, np.array([0.009, 0.012, 0.993]), 3)
# Example #13
def test_logistic_regression_gd():
    """GD fit should recover reference weights/bias and score 100% on train."""
    expected_w = np.array([[1.2], [4.4]])
    expected_b = np.array([0.52])
    model = LogisticRegression(epochs=100, eta=0.01, minibatches=1, random_seed=1)
    model.fit(X, y)
    np.testing.assert_almost_equal(model.w_, expected_w, 2)
    np.testing.assert_almost_equal(model.b_, expected_b, 2)
    predictions = model.predict(X)
    accuracy = np.sum(predictions == y, axis=0) / float(X.shape[0])
    assert accuracy == 1.0, "Acc: %s" % accuracy
# Example #14
def test_logistic_regression_sgd():
    """SGD fit should recover reference weight vector and score 100% on train."""
    expected_w = np.array([[1.18], [4.38]])
    model = LogisticRegression(epochs=100,
                               eta=0.01,
                               minibatches=len(y),
                               random_seed=1)
    model.fit(X, y)  # 0, 1 class
    np.testing.assert_almost_equal(model.w_, expected_w, 2)
    predictions = model.predict(X)
    accuracy = np.sum(predictions == y, axis=0) / float(X.shape[0])
    assert accuracy == 1.0, "Acc: %s" % accuracy
def test_logistic_regression_gd():
    """Full-batch GD (minibatches=1) must hit reference weights, accuracy 1.0."""
    reference_weights = np.array([0.52, 1.2, 4.4])
    clf = LogisticRegression(epochs=100, eta=0.01,
                             minibatches=1, random_seed=1)
    clf.fit(X, y)  # 0, 1 class
    np.testing.assert_almost_equal(clf.w_, reference_weights, 2)
    predictions = clf.predict(X)
    accuracy = np.sum(predictions == y, axis=0) / float(X.shape[0])
    assert accuracy == 1.0, "Acc: %s" % accuracy
def test_l2_regularization_gd():
    """L2-regularized full-batch GD (seed 1) hits shrunken reference weights."""
    expected_weights = np.array([0.303, 1.066, 2.329])
    model = LogisticRegression(eta=0.01,
                               epochs=20,
                               minibatches=1,
                               l2_lambda=1.0,
                               regularization='l2',
                               random_seed=1)
    model.fit(X, y)
    np.testing.assert_almost_equal(model.w_, expected_weights, 3)
    predictions = model.predict(X)
    accuracy = sum(predictions == y) / len(y)
    assert accuracy == 1.0
def test_l2_regularization_gd():
    """L2-regularized GD (l2_lambda=1.0, seed 1) hits shrunken reference
    weights and keeps perfect training accuracy."""
    lr = LogisticRegression(eta=0.01,
                            epochs=20,
                            minibatches=1,
                            l2_lambda=1.0,
                            random_seed=1)
    lr.fit(X, y)
    expect_weights = np.array([0.153, 1.055, 2.284])

    np.testing.assert_almost_equal(lr.w_, expect_weights, 3)
    # Fix: the original called lr.predict(X) twice with identical results;
    # one prediction pass is sufficient.
    y_pred = lr.predict(X)
    acc = np.sum(y == y_pred, axis=0) / float(X.shape[0])
    assert acc == 1.0, "Acc: %s" % acc
def test_l2_regularization_gd():
    """L2-regularized full-batch GD (seed 0) hits shrunken reference weights."""
    expected_weights = np.array([0.115, 1.032, 2.272])
    clf = LogisticRegression(eta=0.01,
                             epochs=20,
                             minibatches=1,
                             l2_lambda=1.0,
                             regularization='l2',
                             random_seed=0)
    clf.fit(X, y)
    np.testing.assert_almost_equal(clf.w_, expected_weights, 3)
    predictions = clf.predict(X)
    accuracy = sum(predictions == y) / len(y)
    assert accuracy == 1.0
# Example #19
def test_l2_regularization_sgd():
    """L2-regularized SGD (seed 0) should strongly shrink all weights."""
    expected_weights = np.array([0.09, 0.232, 0.35])
    model = LogisticRegression(eta=0.01,
                               epochs=20,
                               learning='sgd',
                               l2_lambda=1.0,
                               regularization='l2',
                               random_seed=0)
    model.fit(X, y)
    np.testing.assert_almost_equal(model.w_, expected_weights, 2)
    predictions = model.predict(X)
    accuracy = sum(predictions == y) / len(y)
    assert accuracy == 1.0
def test_l2_regularization_sgd():
    """L2-regularized SGD (seed 1) yields near-zero bias weight, 97% accuracy."""
    expected_weights = np.array([-2.73e-04, 2.40e-01, 3.53e-01])
    model = LogisticRegression(eta=0.01,
                               epochs=100,
                               minibatches=len(y),
                               l2_lambda=1.0,
                               regularization='l2',
                               random_seed=1)
    model.fit(X, y)
    np.testing.assert_almost_equal(model.w_, expected_weights, 2)
    predictions = model.predict(X)
    accuracy = sum(predictions == y) / float(len(y))
    assert accuracy == 0.97
# Example #21
    def __init__(self,
                 eta=0.01,
                 epochs=50,
                 l2_lambda=0.0,
                 minibatches=1,
                 random_seed=None,
                 print_progress=0):
        """Initialize the wrapped logistic-regression classifier.

        Parameters
        ----------
        eta : float (default: 0.01)
            Learning rate.
        epochs : int (default: 50)
            Number of training passes over the data; coerced to ``int`` below.
        l2_lambda : float (default: 0.0)
            L2 regularization strength; 0.0 disables regularization.
        minibatches : int (default: 1)
            Minibatches per epoch (1 corresponds to full-batch training).
        random_seed : int or None (default: None)
            Seed for reproducible runs.
        print_progress : int (default: 0)
            Verbosity level forwarded to the underlying estimator.
        """
        # Coerce so callers may pass float-valued epoch counts.
        epochs = int(epochs)

        # Suppress FutureWarnings raised from mlxtend modules during use.
        warnings.filterwarnings(module='mlxtend*',
                                action='ignore',
                                category=FutureWarning)
        # NOTE(review): positional order assumed to match
        # _LogisticRegression.__init__'s signature — confirm upstream.
        _LogisticRegression.__init__(self, eta, epochs, l2_lambda, minibatches,
                                     random_seed, print_progress)
        BaseWrapperClf.__init__(self)
def test_l2_regularization_sgd():
    """L2-regularized SGD (seed 0) yields shrunken weights and 97% accuracy."""
    expected_weights = np.array([0., 0.24, 0.35])
    clf = LogisticRegression(eta=0.01,
                             epochs=100,
                             minibatches=len(y),
                             l2_lambda=1.0,
                             regularization='l2',
                             random_seed=0)
    clf.fit(X, y)
    np.testing.assert_almost_equal(clf.w_, expected_weights, 2)
    predictions = clf.predict(X)
    accuracy = sum(predictions == y) / float(len(y))
    assert accuracy == 0.97
def test_refit_weights():
    """Refitting with init_params=False must warm-start from, and move,
    the previously learned weights, converging to the reference values."""
    w = np.array([[1.2], [4.4]])
    b = np.array([0.52])
    lr = LogisticRegression(epochs=50,
                            eta=0.01,
                            minibatches=1,
                            random_seed=1)

    lr.fit(X, y)
    # Snapshot BOTH weight entries before the warm-start refit.
    w1 = lr.w_[0][0]
    # Fix: original copy-paste bug read lr.w_[0][0] twice, so the
    # second assertion below compared unrelated entries.
    w2 = lr.w_[1][0]
    lr.fit(X, y, init_params=False)

    # Continued training must change both weight entries...
    assert w1 != lr.w_[0][0]
    assert w2 != lr.w_[1][0]
    # ...while still converging near the reference solution.
    np.testing.assert_almost_equal(lr.w_, w, 2)
    np.testing.assert_almost_equal(lr.b_, b, 2)
# Example #24
def test_invalid_labels_2():
    """Negative class labels must be rejected with an AttributeError."""
    relabeled = np.where(y == 0, -1, 1)
    clf = LogisticRegression(epochs=15, eta=0.01, random_seed=1)
    assert_raises(AttributeError,
                  'y array must not contain negative labels.\nFound [-1  1]',
                  clf.fit,
                  X,
                  relabeled,
                  {(-1, 1)})
# Example #25
def test_invalid_labels_1():
    """Labels outside the allowed {(0, 1)} set must raise AttributeError."""
    relabeled = np.where(y == 0, 2, 1)
    clf = LogisticRegression(epochs=15, eta=0.01, random_seed=1)

    # The set literal is repr'd differently on Python 2 vs 3.
    if sys.version_info >= (3, 0):
        objtype = '{(0, 1)}'
    else:
        objtype = 'set([(0, 1)])'

    expect = 'Labels not in %s.\nFound (1, 2)' % objtype

    assert_raises(AttributeError, expect, clf.fit, X, relabeled, {(0, 1)})
# Example #26
def test_l2_regularization_sgd():
    """L2-regularized SGD (l2_lambda=1.0, seed 1) shrinks the weights to
    reference values and reaches 97% training accuracy."""
    lr = LogisticRegression(eta=0.01,
                            epochs=100,
                            minibatches=len(y),
                            l2_lambda=1.0,
                            random_seed=1)
    lr.fit(X, y)
    expect_weights = np.array([[0.24], [0.35]])

    np.testing.assert_almost_equal(lr.w_, expect_weights, 2)
    # Fix: the original called lr.predict(X) twice with identical results;
    # one prediction pass is sufficient.
    y_pred = lr.predict(X)
    acc = np.sum(y == y_pred, axis=0) / float(X.shape[0])
    assert acc == 0.97, "Acc: %s" % acc
# Example #27
def test_refit_weights():
    """Warm-start refit (init_params=False) must update both weight entries
    and still converge to the reference weights and bias."""
    w = np.array([[1.2], [4.4]])
    b = np.array([0.52])
    lr = LogisticRegression(epochs=50, eta=0.01, minibatches=1, random_seed=1)

    lr.fit(X, y)
    w1 = lr.w_[0][0]
    # Fix: original copy-paste bug snapshotted lr.w_[0][0] twice; the second
    # assertion compares against lr.w_[1][0], so snapshot that entry.
    w2 = lr.w_[1][0]
    lr.fit(X, y, init_params=False)

    assert w1 != lr.w_[0][0]
    assert w2 != lr.w_[1][0]
    np.testing.assert_almost_equal(lr.w_, w, 2)
    np.testing.assert_almost_equal(lr.b_, b, 2)
def test_invalid_labels_1():
    """Labels not in the declared {(0, 1)} set must raise AttributeError."""
    relabeled = np.where(y == 0, 2, 1)
    clf = LogisticRegression(epochs=15, eta=0.01, random_seed=1)
    assert_raises(AttributeError, 'Labels not in {(0, 1)}.\nFound (1, 2)',
                  clf.fit, X, relabeled, {(0, 1)})
from mlxtend.data import iris_data
from mlxtend.evaluate import plot_decision_regions
from mlxtend.classifier import LogisticRegression
import matplotlib.pyplot as plt

# Loading Data: a 2-feature, 2-class subset of Iris.

X, y = iris_data()
X = X[:, [0, 3]]  # sepal length and petal width
X = X[0:100]  # class 0 and class 1
y = y[0:100]  # class 0 and class 1

# standardize both features to zero mean / unit variance
X[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()


# Train with stochastic gradient descent and show the decision boundary.
lr = LogisticRegression(eta=0.01, epochs=100, learning='sgd')
lr.fit(X, y)

plot_decision_regions(X, y, clf=lr)
plt.title('Logistic Regression - Stochastic Gradient Descent')
plt.show()

print(lr.w_)

# Plot the per-iteration training curve stored in cost_.
plt.plot(range(len(lr.cost_)), lr.cost_)
plt.xlabel('Iterations')
plt.ylabel('Misclassifications')  # fix: original label had typo "Missclassifications"
plt.show()
# Example #30
import matplotlib.pyplot as plt

# Two-feature, two-class Iris subset.
X, y = iris_data()

X = X[:, [0, 3]]
X = X[0:100]
y = y[0:100]

# z-score standardize both feature columns.
for col in (0, 1):
    X[:, col] = (X[:, col] - X[:, col].mean()) / X[:, col].std()

# minibatches=1 would be plain gradient descent; len(y) would be SGD.
lr = LogisticRegression(eta=0.1,
                        l2_lambda=0.0,
                        epochs=500,
                        minibatches=5,  # 100/5 = 20 -> minibatch-s
                        random_seed=1,
                        print_progress=3)
lr.fit(X, y)

y_pred = lr.predict(X)
print('Last 3 Class Labels: %s' % y_pred[-3:])
y_pred = lr.predict_proba(X)
print('Last 3 Class Labels: %s' % y_pred[-3:])

plot_decision_regions(X, y, clf=lr)
plt.title("Logistic regression - gd")
plt.show()
# Example #31
import matplotlib.pyplot as plt

# Restrict Iris to two features and the first two classes.
X, y = iris_data()

X = X[:, [0, 3]]
X = X[0:100]
y = y[0:100]

# Standardize each feature (zero mean, unit variance).
X[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()

# minibatches=1 -> gradient descent; minibatches=len(y) -> SGD.
lr = LogisticRegression(eta=0.1,
                        l2_lambda=0.0,
                        epochs=500,
                        minibatches=5,  # 100/5 = 20 -> minibatch-s
                        random_seed=1,
                        print_progress=3)
lr.fit(X, y)

# Show hard labels, then class probabilities, for the last three samples.
y_pred = lr.predict(X)
print('Last 3 Class Labels: %s' % y_pred[-3:])
y_pred = lr.predict_proba(X)
print('Last 3 Class Labels: %s' % y_pred[-3:])

plot_decision_regions(X, y, clf=lr)
plt.title("Logistic regression - gd")
plt.show()
# NOTE(review): fragment — `len`, `train_len`, `Y`, `Y_train`, `X_train`,
# and `X_test` are defined outside this excerpt; `len` appears to shadow
# the builtin with a total sample count. TODO confirm upstream definitions.
test_len = (int) (len - train_len)

# Hold-out labels: everything after the training split point.
Y_test = Y[ train_len : ]

print("len ", len)
print("Y train " , Y_train.size)
print("X train " , (int) (X_train.size/6))  # assumes 6 features per row — verify

print("Y test  " , Y_test.size)
print("X test  " , (int) (X_test.size/6))

print(type(Y_test))

# Full-batch gradient descent (minibatches=1) with progress output.
lr = LogisticRegression(eta=0.05,
                        l2_lambda=0.0,
                        epochs=50,
                        minibatches=1, # for Gradient Descent
                        random_seed=1,
                        print_progress=3)
lr.fit(X_train, Y_train)

print("...")
# Predictions on the TRAINING inputs, though compared to Y_test below —
# NOTE(review): likely intended lr.predict(X_test); confirm.
pre = lr.predict(X_train)


correct = 0
total = 0

# NOTE(review): loop is truncated in this excerpt — as shown it never
# increments i2 and would spin forever; the accuracy tail is missing.
i2 = 0
while(i2 < Y_test.size):
    if(Y_test[i2] == pre[i2]):
        correct += 1
# Example #33
def test_clone():
    """The estimator must be compatible with scikit-learn's clone()."""
    estimator = LogisticRegression()
    clone(estimator)
# Example #34
from mlxtend.plotting import plot_decision_regions
from mlxtend.classifier import LogisticRegression
import matplotlib.pyplot as plt

# Loading Data: two features, first two Iris classes.
X, y = iris_data()
X = X[:, [0, 3]]  # sepal length and petal width
X = X[0:100]  # class 0 and class 1
y = y[0:100]  # class 0 and class 1

# standardize each feature column
for col in (0, 1):
    X[:, col] = (X[:, col] - X[:, col].mean()) / X[:, col].std()

lr = LogisticRegression(eta=0.1,
                        l2_lambda=0.0,
                        epochs=100,
                        minibatches=1,  # for Gradient Descent
                        random_seed=1,
                        print_progress=3)
lr.fit(X, y)

# Decision boundary of the fitted model.
plot_decision_regions(X, y, clf=lr)
plt.title('Logistic Regression - Gradient Descent')
plt.show()

# Training-cost curve over iterations.
plt.plot(range(len(lr.cost_)), lr.cost_)
plt.xlabel('Iterations')
plt.ylabel('Cost')
plt.show()