Example #1
import numpy as np
# Module name assumed here, matching the other examples on this page.
from logisticRegression import LogisticRegression

if __name__ == "__main__":
    # Assumption: mean of class 0; its definition is missing from this snippet.
    mu_1 = [0, 0]
    mu_2 = [0, 1.5]
    sigma_1 = [[1, 0.75], [0.75, 1]]
    sigma_2 = [[1, 0.75], [0.75, 1]]

    train_x_0 = np.random.multivariate_normal(mu_1, sigma_1, size=500)
    train_x_1 = np.random.multivariate_normal(mu_2, sigma_2, size=500)
    train_y_0 = np.zeros(500)
    train_y_1 = np.ones(500)
    test_x_0 = np.random.multivariate_normal(mu_1, sigma_1, size=250)
    test_x_1 = np.random.multivariate_normal(mu_2, sigma_2, size=250)
    test_y_0 = np.zeros(250)
    test_y_1 = np.ones(250)

    train_x = np.concatenate((train_x_0, train_x_1), axis=0)
    train_y = np.concatenate((train_y_0, train_y_1), axis=0)
    test_x = np.concatenate((test_x_0, test_x_1), axis=0)
    test_y = np.concatenate((test_y_0, test_y_1), axis=0)

    modes = ["batch", "online"]
    lrs = [1, 0.1, 0.01, 0.001]
    for mode in modes:
        for lr in lrs:
            model = LogisticRegression(mode=mode, lr=lr)
            model.fit(train_x, train_y)
            preds = model.predict(test_x)
            print(
                f"Mode: {mode}, LR: {lr}, Iteration: {model.best_iter}, Accuracy: {model.accuracy(preds, test_y)}",
                end="\n\n")
            model.plot_boundary(test_x, test_y)
            model.plot_loss_history()
            model.plot_grad_history()
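
The mode argument above selects between batch and online (per-sample) gradient descent. The class internals are not shown on this page; as a rough sketch under that assumption, both modes share the same logistic-regression gradient and differ only in how many samples feed each update:

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def gradient_step(w, b, x, y, lr):
    # One gradient-descent update on samples x of shape (m, d) and labels y of shape (m,).
    # Passing the full training set once per epoch gives "batch" mode; passing one
    # shuffled sample at a time (m == 1) gives "online" mode.
    p = sigmoid(x @ w + b)               # predicted probabilities, shape (m,)
    grad_w = x.T @ (p - y) / x.shape[0]  # gradient of the mean cross-entropy loss
    grad_b = np.mean(p - y)
    return w - lr * grad_w, b - lr * grad_b
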
Example #2
from sklearn.datasets import load_iris
from logisticRegression import LogisticRegression
import numpy as np

if __name__ == '__main__':
    data = load_iris()
    x = data.data
    y = data.target
    lr = LogisticRegression()
    lr.fit(x, y)
    pred = lr.predict(x)
    accur = np.sum(pred == y) / y.size  # true division in Python 3; the old * 1.0 guard is unneeded
    print('lr accuracy: %f' % accur)


Example #3
from logisticRegression import LogisticRegression  # module name assumed, as in the other examples

# Standardize the dataset (scale pixel values from [0, 255] to [0, 1])
train_set_x = train_set_x_flatten / 255.
test_set_x = test_set_x_flatten / 255.
###################################################################################################
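
# train_set_x_flatten and train_set_y are not defined in this snippet; dim=12288 below
# suggests 64x64 RGB images (64 * 64 * 3 = 12288) flattened into column vectors.
# A minimal sketch of that preprocessing step, with names, shapes, and counts assumed:
import numpy as np

m_demo = 100                                                       # assumed sample count
images_demo = np.random.randint(0, 256, size=(m_demo, 64, 64, 3))  # stand-in image data
flat_demo = images_demo.reshape(m_demo, -1).T                      # shape (12288, m_demo)
print(flat_demo.shape)                                             # (12288, 100)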

# Fit Logistic Model
LR = LogisticRegression(dim=12288)
grads, costs = LR.train(X=train_set_x,
                        Y=train_set_y,
                        num_iterations=2000,
                        learning_rate=0.005,
                        print_cost=True,
                        plot_cost=True)

# Prediction Accuracy for training data
y_pred = LR.predict(X=train_set_x)
LR.accuracy_stats(train_set_y, y_pred)

# Prediction Accuracy for test data
y_test_pred = LR.predict(X=test_set_x)
LR.accuracy_stats(test_set_y, y_test_pred)

###################################################################################################

# Experiment with different learning rates
learning_rates = [0.01, 0.001, 0.0001]
models_costs = {}
for l_rate in learning_rates:
    print(f"learning rate is: {l_rate}")
    # Fit one model per learning rate and keep its cost history for comparison
    LR = LogisticRegression(dim=12288)
    grads, costs = LR.train(X=train_set_x,
                            Y=train_set_y,
                            num_iterations=2000,
                            learning_rate=l_rate,
                            print_cost=False,
                            plot_cost=False)
    models_costs[l_rate] = costs
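
# With the cost histories collected in models_costs, a natural follow-up is to compare
# the curves (a sketch; the plotting details are assumptions, not part of this snippet):
import matplotlib.pyplot as plt

for l_rate, costs_history in models_costs.items():
    plt.plot(costs_history, label=f"lr = {l_rate}")
plt.xlabel("training iteration")   # assumption: one recorded cost per iteration
plt.ylabel("cost")
plt.legend()
plt.show()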

Example #4
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
import os
# set current directory
os.chdir("F:\\neuralnetworksanddeeplearning\\codes")
from logisticRegression import LogisticRegression
from generalFunctions import plot_decision_boundary

######################################################################################

## Generate a dataset and plot it
np.random.seed(0)
X, y = datasets.make_moons(200, noise=0.20)
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Spectral)

# The data is not linearly separable, which means that linear classifiers like logistic
# regression won't be able to fit the data unless you hand-engineer non-linear features
# (such as polynomials). In fact, that's one of the major advantages of neural networks:
# you don't need to worry about the feature engineering. The hidden layer of a neural
# network will learn the features for you.
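
# To make the feature-engineering point concrete: with polynomial features added by hand,
# even a linear classifier fits the moons. A minimal sketch using scikit-learn's
# PolynomialFeatures and its own LogisticRegression (not the custom class on this page):
from sklearn.linear_model import LogisticRegression as SkLogisticRegression
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import PolynomialFeatures

X_demo, y_demo = datasets.make_moons(200, noise=0.20, random_state=0)
# Degree-3 polynomial features map the 2-D points into a space where the two moons
# are close to linearly separable, so a linear decision rule suffices.
poly_clf = make_pipeline(PolynomialFeatures(degree=3), SkLogisticRegression())
poly_clf.fit(X_demo, y_demo)
print(poly_clf.score(X_demo, y_demo))  # typically far above what a plain linear boundary achieves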
######################################################################################

## Logistic Regression
num_features = 2
LR = LogisticRegression(dim=num_features)
grads, costs = LR.train(X=X.T,
                        Y=y.reshape(1, 200),
                        num_iterations=5000,
                        learning_rate=0.01,
                        print_cost=True,
                        plot_cost=True)
plot_decision_boundary(lambda x: LR.predict(x), X=X, y=y)
plt.title("Logistic Regression")

# The graph shows the decision boundary learned by our logistic regression classifier. It
# separates the data as well as it can using a straight line, but it is unable to capture
# the "moon shape" of our data.
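
# For reference, helpers like plot_decision_boundary are commonly implemented with a mesh
# grid; a minimal sketch of that pattern (an assumption about, not the actual contents of,
# generalFunctions):
def plot_decision_boundary_sketch(pred_func, X, y):
    # Build a fine grid covering the data, with a small margin around it.
    x_min, x_max = X[:, 0].min() - 0.5, X[:, 0].max() + 0.5
    y_min, y_max = X[:, 1].min() - 0.5, X[:, 1].max() + 0.5
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    # Classify every grid point and colour the two regions; the boundary is where
    # the predicted class changes.
    Z = np.array(pred_func(np.c_[xx.ravel(), yy.ravel()])).reshape(xx.shape)
    plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral, alpha=0.6)
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Spectral)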