Example #1
    A2, cache = forward_pro(X, parameters)
    predictions = np.round(A2)

    return predictions
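
# forward_pro above is assumed to be defined elsewhere in this file; the
# following is a minimal sketch of the usual tanh/sigmoid two-layer forward
# pass it would compute (function name, parameter keys, and cache layout are
# assumptions, not taken from the original):
def forward_pro_sketch(X, parameters):
    W1, b1 = parameters['W1'], parameters['b1']
    W2, b2 = parameters['W2'], parameters['b2']
    Z1 = np.dot(W1, X) + b1          # hidden pre-activation
    A1 = np.tanh(Z1)                 # hidden activation
    Z2 = np.dot(W2, A1) + b2         # output pre-activation
    A2 = 1.0 / (1.0 + np.exp(-Z2))   # sigmoid output, shape (1, m)
    return A2, {'Z1': Z1, 'A1': A1, 'Z2': Z2, 'A2': A2}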

# parameters, X_assess = predict_test_case()

# predictions = predict(X_assess,parameters)
# print("predictions mean = " + str(np.mean(predictions)))

parameters = model(X, Y, n_h = 4, num_iter=10000, print_cost=True)

# Then use the trained parameters to make predictions.
predictions = predict(X, parameters)
print('Prediction accuracy: %d' % float((np.dot(Y, predictions.T) + np.dot(1 - Y, 1 - predictions.T)) / float(Y.size) * 100) + '%')
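
# a quick illustrative check (not from the original) of why the dot-product
# expression above counts correct predictions: np.dot(Y, predictions.T)
# counts examples where label and prediction are both 1, and
# np.dot(1 - Y, 1 - predictions.T) counts those where both are 0
assert np.allclose(
    (np.dot(Y, predictions.T) + np.dot(1 - Y, 1 - predictions.T)) / Y.size,
    np.mean(predictions == Y))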

# Plot the prediction results.
plot_decision_boundary(lambda x: predict(x.T, parameters), X, Y.ravel())

plt.figure(figsize=(16, 32))
hidden_layer_sizes = [1, 2, 3, 4, 5, 20, 50]  # different hidden layer sizes
for i, n_h in enumerate(hidden_layer_sizes):
    plt.subplot(5, 2, i + 1)
    plt.title('Hidden Layer of size %d' % n_h)
    parameters = model(X, Y, n_h, num_iter=5000)
    plot_decision_boundary(lambda x: predict(x.T, parameters), X, Y.ravel())
    predictions = predict(X, parameters)
    accuracy = float((np.dot(Y, predictions.T) + np.dot(1 - Y, 1 - predictions.T)) / float(Y.size) * 100)
    print ("{}个隐藏层神经元时的准确度是: {} %".format(n_h, accuracy))

Example #2
X_assess, Y_assess = nn_model_test_case()
W1 = np.array([[-0.00416758, -0.00056267], [-0.02136196, 0.01640271], [-0.01793436, -0.00841747], [0.00502881, -0.01245288]])
b1 = np.array([[0.], [0.], [0.], [0.]])
W2 = np.array([[-0.01057952, -0.00909008, 0.00551454, 0.02292208]])
b2 = np.array([[0.]])
a1, a2 = PropagateFor(X_assess, W1, W2, b1, b2)
print(a2)
pares = {'W1': W1, 'W2': W2, 'b1': b1, 'b2': b2}
DW2, Db2, DW1, Db1 = propagateback(pares, X_assess, Y_assess, {'A1': a1, 'A2': a2})
pares = update_parameters(pares, {'dW2': DW2, 'dW1': DW1, 'db1': Db1, 'db2': Db2})
print(pares)
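
# update_parameters above comes from the surrounding file; a minimal vanilla
# gradient-descent sketch consistent with the key names used here (the
# learning-rate value is illustrative):
def update_parameters_sketch(parameters, grads, learning_rate=1.2):
    return {
        'W1': parameters['W1'] - learning_rate * grads['dW1'],
        'b1': parameters['b1'] - learning_rate * grads['db1'],
        'W2': parameters['W2'] - learning_rate * grads['dW2'],
        'b2': parameters['b2'] - learning_rate * grads['db2'],
    }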
X_assess, Y_assess = nn_model_test_case()
parameters = nn_model(X_assess, Y_assess, 4, num_iterations=10000, print_cost=False)
print(parameters)
'''
# Plot the decision boundary
plot_decision_boundary(lambda x: predict(parameters, x.T), X, Y)
plt.title("Decision Boundary for hidden layer size " + str(4))
'''
X, Y = load_planar_dataset()
parameters = nn_model(X, Y, n_h=4, num_iterations=10000, print_cost=True)
hidden_layer_sizes = [1, 2, 3, 4, 5, 20, 50]
for i, n_h in enumerate(hidden_layer_sizes):
    parameters = nn_model(X, Y, n_h, num_iterations=5000)
    predictions = predict(parameters, X)
    accuracy = float(
        (np.dot(Y, predictions.T) + np.dot(1 - Y, 1 - predictions.T)) /
        float(Y.size) * 100)
    print("Accuracy for {} hidden units: {} %".format(n_h, accuracy))
Example #3
plt.scatter(X[0, :], X[1, :], s=40, c=Y.flatten(), cmap=plt.cm.Spectral)
plt.show()
shape_X = X.shape
shape_Y = Y.shape
m = X.shape[1]

print("The shape of X is: " + str(shape_X))
print("The shape of Y is: " + str(shape_Y))
print("I have m = %d training examples!" % (m))

# 3 Simple Logistic Regression

# Train the logistic regression classifier
clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(X.T, Y.T)

# Plot the decision boundary for logistic regression
plot_decision_boundary(lambda x: clf.predict(x), X, Y.flatten())

plt.title("Logistic Regression")
plt.show()

# Print accuracy
LR_predictions = clf.predict(X.T)
print('\nAccuracy of logistic regression: %d ' %
      float((np.dot(Y, LR_predictions) + np.dot(1 - Y, 1 - LR_predictions)) /
            float(Y.size) * 100) + '%' +
      "(percentage of correctly labelled datapoints)")
Example #4
#
# Before building a full neural network, let's first see how logistic regression performs on this problem. You can use sklearn's built-in functions for that. Run the code below to train a logistic regression classifier on the dataset.

# In[5]:

# Train the logistic regression classifier
clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(X.T, Y.T)


# You can now plot the decision boundary of these models. Run the code below.

# In[6]:

# Plot the decision boundary for logistic regression
plot_decision_boundary(lambda x: clf.predict(x), X, Y)
plt.title("Logistic Regression")

# Print accuracy
LR_predictions = clf.predict(X.T)
print ('Accuracy of logistic regression: %d ' % float((np.dot(Y,LR_predictions) + np.dot(1-Y,1-LR_predictions))/float(Y.size)*100) +
       '% ' + "(percentage of correctly labelled datapoints)")


# **Expected Output**:
#
# <table style="width:20%">
#   <tr>
#     <td>**Accuracy**</td>
#     <td> 47% </td>
#   </tr>
# </table>
        plt.imshow(X[:, sel].reshape(28, 28), cmap='gray_r')
        plt.title("Number: " + str(np.argmax(Y[:, sel])))
        plt.xlabel(Y[:, sel])
        plt.show()

    else:
        # Visualize the data
        plt.scatter(X[0, :], X[1, :], s=40, c=Y[0, :], cmap=plt.cm.Spectral)
        plt.show()

        # Train the logistic regression classifier
        clf = sklearn.linear_model.LogisticRegressionCV()
        clf.fit(X.T, Y.T)

        # Plot the decision boundary for logistic regression
        plot_decision_boundary(lambda x: clf.predict(x), X, Y)
        plt.title("Logistic Regression")
        plt.show()

        # Print accuracy
        LR_predictions = clf.predict(X.T)
        print('Accuracy of logistic regression: %d ' % float(
            (np.dot(Y, LR_predictions) + np.dot(1 - Y, 1 - LR_predictions)) /
            float(Y.size) * 100) + '% ' +
              "(percentage of correctly labelled datapoints)")


def layer_sizes(X, Y):
    n_x = X.shape[0]  # size of input layer
    n_y = Y.shape[0]  # size of output layer
    return (n_x, n_y)
X, Y = load_planar_dataset()

n_x = X.shape[0]
n_h = 5
n_y = Y.shape[0]

model = Model(n_x, n_h, n_y)
model.fit(X, Y, num_iterations=10000, print_cost=True)
predictions = model.predict(X)

correct = 0
wrong = 0

for i in range(Y.shape[1]):
    if predictions[0, i] == Y[0, i]:
        correct += 1

    else:
        wrong += 1

accuracy = 100 * correct / (correct + wrong)
print(f"Correct: {correct} Wrong: {wrong}\nAccuracy: {accuracy}")

plot_decision_boundary(lambda x: model.predict(x.T), X, Y)
plt.title("Decision Boundary for hidden layer size " + str(4))

# X_x = X[0, :]
# X_y = X[1, :]
# c = Y[0, :]

plt.show()
plt.scatter(X[0, :], X[1, :], c=Y.ravel(), s=40, cmap=plt.cm.Spectral)

shape_X = X.shape
shape_Y = Y.shape
m = Y.shape[1]  # training set size

print('The shape of X is: ' + str(shape_X))
print('The shape of Y is: ' + str(shape_Y))
print('I have m = %d training examples!' % (m))

# Train the logistic regression classifier
clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(X.T, Y.T)

# Plot the decision boundary for logistic regression
plot_decision_boundary(lambda x: clf.predict(x), X,
                       Y)  # it just generates the contour plot below.
plt.title("Logistic Regression")

LR_predictions = clf.predict(X.T)
print('Accuracy of logistic regression: %d ' %
      float((np.dot(Y, LR_predictions) + np.dot(1 - Y, 1 - LR_predictions)) /
            float(Y.size) * 100) + '% ' +
      "(percentage of correctly labelled datapoints)")


def layer_sizes(X, Y):
    n_x = X.shape[0]  # size of input layer
    n_h = 4
    n_y = Y.shape[0]  # size of output layer
    return (n_x, n_h, n_y)
Example #8
### START CODE HERE ### (≈ 3 lines of code)
shape_X = X.shape
shape_Y = Y.shape
m = shape_X[1]  # training set size
### END CODE HERE ###

# print('The shape of X is: ' + str(shape_X))
# print('The shape of Y is: ' + str(shape_Y))
# print('I have m = %d training examples!' % (m))

# Train the logistic regression classifier
clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(X.T, Y.reshape(400))

# Plot the decision boundary for logistic regression
plot_decision_boundary(lambda x: clf.predict(x), X, Y.reshape(400))
plt.title("Logistic Regression")

# Print accuracy
LR_predictions = clf.predict(X.T)
# print('Accuracy of logistic regression: %d ' % float((np.dot(Y, LR_predictions) + np.dot(1-Y, 1-LR_predictions))/float(Y.size)*100) +
#       '% ' + "(percentage of correctly labelled datapoints)")

# GRADED FUNCTION: layer_sizes


def layer_sizes(X, Y):
    """
    Arguments:
    X -- input dataset of shape (input size, number of examples)
    Y -- labels of shape (output size, number of examples)
    """
Example #9
    # assert B1.shape == (n_h1, 1)
    #
    # # assert Z1.shape == (n_h1, m)
    # # assert A1.shape == Z1.shape
    #
    # assert W2.shape == (n_y, n_h1)
    # assert B2.shape == (n_y, 1)

    n_iter = 10000
    lr = 8
    params = initialize_parameters(n_x, n_y, n_h1)
    W1, B1, W2, B2 = params['W1'], params['B1'], params['W2'], params['B2']

    plt_x = []
    plt_y = []
    for i in range(n_iter):
        A2, cache = forward_propagation(X, params)
        grads = backward_propagation(params, cache, X, Y)
        params = update_params(params, grads, lr=lr)
        if i % 100 == 0:
            cost = -(1. / m) * np.sum(Y * np.log(A2) + (1 - Y) * (np.log(1 - A2)))
            plt_x.append(i)
            plt_y.append(cost)
            print('lr=%s, cost=%s' % (lr, cost))
    # plt.ylim(0.2,0.25)
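    # a minimal sketch (not in the original) that draws the cost curve
    # collected in plt_x/plt_y above, which is otherwise never plotted
    plt.plot(plt_x, plt_y)
    plt.xlabel('iteration')
    plt.ylabel('cost')
    plt.show()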
    plot_decision_boundary(lambda x: predict(params, x.T), X, Y)
    plt.title("Decision Boundary for hidden layer size " + str(4))
    plt.show()

    predictions = predict(params, X)
    print('Accuracy: %d' % float((np.dot(Y, predictions.T) + np.dot(1 - Y, 1 - predictions.T)) / float(Y.size) * 100) + '%')
Example #10
def predict(n_n, x):
    y = n_n.predict(x)
    y = np.where(y > 0.5, 1, 0)

    return y


if __name__ == '__main__':
    np.seterr(all='raise')  # make numpy raise exceptions on floating-point errors
    origin_x, origin_y = load_planar_dataset()
    train_x, train_y, test_x, test_y = split_train_test(
        origin_x, origin_y, 0.3)
    layers = (
        Layer(7, origin_x.shape[0], g=ReLu, alpha=0.3),
        # Layer(6, 5, g=ReLu, alpha=0.3),
        # Layer(7, 6, g=ReLu, alpha=0.3),
        Layer(8, 7, g=tanh, alpha=0.3),
        Layer(1, 8, g=sigmoid, alpha=0.3, op=True),
    )
    nn = NN(origin_x, origin_y, layers, num_iterations=10000)
    nn.train()
    test_precision = precision(origin_y, nn.predict(origin_x))
    print('Prediction accuracy:', test_precision)
    plot_decision_boundary(lambda x: predict(nn, x.T), origin_x, origin_y)
    plt.show()
Example #11
def do_something():
    X, Y = load_planar_dataset()
    # Visualize the data:
    plt.scatter(X[0, :], X[1, :], c=Y.ravel(), s=40, cmap=plt.cm.Spectral)

    ### Shapes
    ### START CODE HERE ### (≈ 3 lines of code)
    shape_X = X.shape
    shape_Y = Y.shape
    m = Y.size  # training set size
    ### END CODE HERE ###

    print ('The shape of X is: ' + str(shape_X))
    print ('The shape of Y is: ' + str(shape_Y))
    print ('I have m = %d training examples!' % (m))

    ### Logistic Regression
    # Train the logistic regression classifier
    clf = sklearn.linear_model.LogisticRegressionCV()
    clf.fit(X.T, Y.T)
    # Plot the decision boundary for logistic regression
    plot_decision_boundary(lambda x: clf.predict(x), X, Y)
    plt.title("Logistic Regression")

    # Print accuracy
    LR_predictions = clf.predict(X.T)
    print ('Accuracy of logistic regression: %d ' % float((np.dot(Y,LR_predictions) + np.dot(1-Y,1-LR_predictions))/float(Y.size)*100) +
           '% ' + "(percentage of correctly labelled datapoints)")
    # plt.show()

    ### Layer sizes
    X_assess, Y_assess = layer_sizes_test_case()
    (n_x, n_h, n_y) = layer_sizes(X_assess, Y_assess)
    print("The size of the input layer is: n_x = " + str(n_x))
    print("The size of the hidden layer is: n_h = " + str(n_h))
    print("The size of the output layer is: n_y = " + str(n_y))

    ### Initialize parameters
    n_x, n_h, n_y = initialize_parameters_test_case()

    parameters = initialize_parameters(n_x, n_h, n_y)
    print("W1 = " + str(parameters["W1"]))
    print("b1 = " + str(parameters["b1"]))
    print("W2 = " + str(parameters["W2"]))
    print("b2 = " + str(parameters["b2"]))

    ## The Loop
    ### Forward propagation
    X_assess, parameters = forward_propagation_test_case()

    A2, cache = forward_propagation(X_assess, parameters)

    # Note: we use the mean here just to make sure that your output matches ours.
    print(np.mean(cache['Z1']), np.mean(cache['A1']), np.mean(cache['Z2']), np.mean(cache['A2']))

    ### Cost function
    A2, Y_assess, parameters = compute_cost_test_case()

    print("cost = " + str(compute_cost(A2, Y_assess, parameters)))

    ### Backward propagation
    parameters, cache, X_assess, Y_assess = backward_propagation_test_case()

    grads = backward_propagation(parameters, cache, X_assess, Y_assess)
    print ("dW1 = "+ str(grads["dW1"]))
    print ("db1 = "+ str(grads["db1"]))
    print ("dW2 = "+ str(grads["dW2"]))
    print ("db2 = "+ str(grads["db2"]))

    ### Update parameters

    parameters, grads = update_parameters_test_case()
    parameters = update_parameters(parameters, grads)

    print("W1 = " + str(parameters["W1"]))
    print("b1 = " + str(parameters["b1"]))
    print("W2 = " + str(parameters["W2"]))
    print("b2 = " + str(parameters["b2"]))

    ## End of Loop

    ## Integrate parts 4.1, 4.2 and 4.3 in nn_model()
    X_assess, Y_assess = nn_model_test_case()

    parameters = nn_model(X_assess, Y_assess, 4, num_iterations=10000, print_cost=False)
    print("W1 = " + str(parameters["W1"]))
    print("b1 = " + str(parameters["b1"]))
    print("W2 = " + str(parameters["W2"]))
    print("b2 = " + str(parameters["b2"]))

    ## Predictions
    parameters, X_assess = predict_test_case()

    predictions = predict(parameters, X_assess)
    print("predictions mean = " + str(np.mean(predictions)))

    ## Neural network
    # Build a model with a n_h-dimensional hidden layer
    parameters = nn_model(X, Y, n_h = 4, num_iterations = 10000, print_cost=True)

    # Plot the decision boundary
    plot_decision_boundary(lambda x: predict(parameters, x.T), X, Y)
    plt.title("Decision Boundary for hidden layer size " + str(4))

    # Print accuracy
    predictions = predict(parameters, X)
    print ('Accuracy: %d' % float((np.dot(Y,predictions.T) + \
        np.dot(1-Y,1-predictions.T))/float(Y.size)*100) + '%')

    ## Tuning hidden layer size (optional)
    # This may take about 2 minutes to run
    plt.figure(figsize=(16, 32))
    hidden_layer_sizes = [1, 2, 3, 4, 5, 20, 50]
    for i, n_h in enumerate(hidden_layer_sizes):
        plt.subplot(5, 2, i+1)
        plt.title('Hidden Layer of size %d' % n_h)
        parameters = nn_model(X, Y, n_h, num_iterations = 5000)
        plot_decision_boundary(lambda x: predict(parameters, x.T), X, Y)
        predictions = predict(parameters, X)
        accuracy = float((np.dot(Y,predictions.T) + np.dot(1-Y,1-predictions.T))/float(Y.size)*100)
        print ("Accuracy for {} hidden units: {} %".format(n_h, accuracy))

    ## Performance on other datasets
    # Datasets
    noisy_circles, noisy_moons, blobs, gaussian_quantiles, no_structure = load_extra_datasets()

    datasets = {"noisy_circles": noisy_circles,
                "noisy_moons": noisy_moons,
                "blobs": blobs,
                "gaussian_quantiles": gaussian_quantiles}

    ### START CODE HERE ### (choose your dataset)
    dataset = "noisy_moons"
    ### END CODE HERE ###

    X, Y = datasets[dataset]
    X, Y = X.T, Y.reshape(1, Y.shape[0])

    # make blobs binary
    if dataset == "blobs":
        Y = Y % 2

    # Visualize the data
    plt.scatter(X[0, :], X[1, :], c=Y.ravel(), s=40, cmap=plt.cm.Spectral)
    plt.show()
### START CODE HERE ### (≈ 3 lines of code)
shape_X = X.shape
shape_Y = Y.shape
m = shape_X[1] # training set size
### END CODE HERE ###

print ('The shape of X is: ' + str(shape_X))
print ('The shape of Y is: ' + str(shape_Y))
print ('I have m = %d training examples!' % (m))


# In[3]:


clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(X.T, Y.T)

plot_decision_boundary(lambda x: clf.predict(x), X, Y.ravel())
plt.title("Logistic Regression")

# Print accuracy
LR_predictions = clf.predict(X.T)
print ('Accuracy of logistic regression: %d ' % float((np.dot(Y,LR_predictions) + np.dot(1-Y,1-LR_predictions))/float(Y.size)*100) +
       '% ' + "(percentage of correctly labelled datapoints)")


# In[4]:


def layer_sizes(X, Y):
    """
    Arguments:
    X -- input dataset of shape (input size, number of examples)
    Y -- labels of shape (output size, number of examples)
    """
Example #13
# Plot the decision boundary for logistic regression
#plot_decision_boundary(lambda x: clf.predict(x), X, Y)
#plt.title("Logistic Regression")

# Print accuracy
#LR_predictions = clf.predict(X.T)
#print ('Accuracy of logistic regression: %d ' % float((np.dot(Y,LR_predictions) + np.dot(1-Y,1-LR_predictions))/float(Y.size)*100) +
#       '% ' + "(percentage of correctly labelled datapoints)")

nn_model = Planar_NN()
feat_num = 4

parameters = nn_model.train(X, Y, feat_num)

predictions = nn_model.predict(parameters, X)

accuracy = float(
    (np.dot(Y, predictions.T) + np.dot(1 - Y, 1 - predictions.T)) /
    float(Y.size) * 100)

print("accuracy is {} %".format(accuracy))

# Plot the decision boundary
plot_decision_boundary(lambda x: nn_model.predict(parameters, x.T), X, Y)
plt.title("Decision Boundary for hidden layer size " + str(feat_num))

#print(parameters)

#a,b,c = nn_model.layer_sizes(X,Y)
Example #14
            self.dz[self.l - 1] = self.a[self.l - 1] - y  # assumes the last layer is sigmoid
            self.dw[self.l - 1] = 1.0 / m * np.dot(self.dz[self.l - 1],
                                                   self.a[self.l - 2].T)
            self.db[self.l - 1] = 1.0 / m * np.sum(
                self.dz[self.l - 1], axis=1, keepdims=True)
            self.da[self.l - 2] = np.dot(self.w[self.l - 1].T,
                                         self.dz[self.l - 1])
            for i in range(self.l - 2, -1, -1):  # i = l-2, ..., 1, 0
                self.backPpRelu(i)

            #print(self.w)
            self.update(lr)

    def predict(self, x):
        for i in range(self.l - 1):  # 0,1
            self.forPpRelu(i, x)
        self.forPpSig(self.l - 1)  # 2
        return np.where(self.a[self.l - 1] > 0.5, 1, 0)


x, y = load_planar_dataset()
y = y.ravel()
para = [4, 5, 1]  # layer sizes (defined but not passed to deepNN below)
dnn = deepNN()
dnn.train(x, y, 10000, 0.8)
yhat = dnn.predict(x)
print(yhat.shape)

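# note: dividing the error count by 4 is equivalent to 100 * errors / 400,
# i.e. the line below hard-codes the planar dataset size m = 400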
print("precision of nn:" + str(100 - (np.sum(np.abs(yhat - y))) / 4) + "%")
plot_decision_boundary(lambda x: dnn.predict(x.T), x, y)
plt.show()
Example #15
import numpy as np
import matplotlib.pyplot as plt
from planar_utils import plot_decision_boundary, sigmoid, load_planar_dataset, load_extra_datasets
from neuralnetwork import nn_model, predict

X = np.array([[0, 0], [1, 0], [0, 1], [-1, 0], [1, -1]])
X = X.T
print(X.shape)
Y = np.array([[1, 1, 0, 0, 0]])
print(Y.shape)
# Build a model with a n_h-dimensional hidden layer
parameters = nn_model(X, Y, n_h = 2, num_iterations = 5000, print_cost=True)
print(parameters)
# Plot the decision boundary
plot_decision_boundary(lambda x: predict(parameters, x.T), X, Y.flatten())
plt.title("Decision Boundary for hidden layer size " + str(2))
plt.show()
# Print accuracy
predictions = predict(parameters, X)
print ('Accuracy: %d' % float((np.dot(Y,predictions.T) + np.dot(1-Y,1-predictions.T))/float(Y.size)*100) + '%')
Example #16
    ### START CODE HERE ### (≈ 2 lines of code)
    A2, cache = forward_propagation(X, parameters)
    predictions = np.around(A2)
    ### END CODE HERE ###

    return predictions


parameters, X_assess = predict_test_case()

predictions = predict(parameters, X_assess)
print("predictions mean = " + str(np.mean(predictions)))
parameters = nn_model(X, Y, n_h=4, num_iterations=10000, print_cost=True)

# Plot the decision boundary
plot_decision_boundary(lambda x: predict(parameters, x.T), X, np.squeeze(Y))
plt.title("Decision Boundary for hidden layer size " + str(4))

#plt.show()

# Print accuracy
predictions = predict(parameters, X)
print('Accuracy: %d' %
      float((np.dot(Y, predictions.T) + np.dot(1 - Y, 1 - predictions.T)) /
            float(Y.size) * 100) + '%')

# This may take about 2 minutes to run

plt.figure(figsize=(16, 32))
hidden_layer_sizes = [1, 2, 3, 4, 5, 10, 20]
for i, n_h in enumerate(hidden_layer_sizes):
    plt.subplot(5, 2, i + 1)
    plt.title('Hidden Layer of size %d' % n_h)
    parameters = nn_model(X, Y, n_h, num_iterations=5000)
    plot_decision_boundary(lambda x: predict(parameters, x.T), X, np.squeeze(Y))
    predictions = predict(parameters, X)
    accuracy = float((np.dot(Y, predictions.T) +
                      np.dot(1 - Y, 1 - predictions.T)) / float(Y.size) * 100)
    print("Accuracy for {} hidden units: {} %".format(n_h, accuracy))
### START CODE HERE ### (≈ 3 lines of code)
shape_X = X.shape
shape_Y = Y.shape
m = X.shape[1]  # training set size
### END CODE HERE ###

print('The shape of X is: ' + str(shape_X))
print('The shape of Y is: ' + str(shape_Y))
print('I have m = %d training examples!' % (m))
""" 
3 - Simple Logistic Regression
 """
# Train the logistic regression classifier
clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(X.T, Y.T)
# Plot the decision boundary for logistic regression
plot_decision_boundary(lambda x: clf.predict(x), X, np.reshape(Y, -1))
plt.title("Logistic Regression")

# Print accuracy
LR_predictions = clf.predict(X.T)  # (400,)
print('Accuracy of logistic regression: %d ' %
      float((np.dot(Y, LR_predictions) + np.dot(1 - Y, 1 - LR_predictions)) /
            float(Y.size) * 100) + '% ' +
      "(percentage of correctly labelled datapoints)")
""" 
4 - Neural Network model
 """
X_assess, Y_assess = layer_sizes_test_case()
(n_x, n_h, n_y) = layer_sizes(X_assess, Y_assess)
print("The size of the input layer is: n_x = " + str(n_x))
print("The size of the hidden layer is: n_h = " + str(n_h))
    shape_Y = Y.shape
    m = X.shape[1]  # training set size

    print('The shape of X is: ' + str(shape_X))
    print('The shape of Y is: ' + str(shape_Y))
    print('I have m = %d training examples!' % (m))

    # Train the logistic regression classifier
    clf = sklearn.linear_model.LogisticRegressionCV()

    Y_flat = Y.flatten()
    clf.fit(X.T, Y_flat.T)
    # clf.fit(X.T, Y.T);

    # Plot the decision boundary for logistic regression
    plot_decision_boundary(lambda x: clf.predict(x), X, Y_flat, "Logistic Regression")
    # plot_decision_boundary(lambda x: clf.predict(x), X, Y)
    # plt.title("Logistic Regression")
    # plt.show(block = False)

    # Print accuracy
    LR_predictions = clf.predict(X.T)
    print('Accuracy of logistic regression: %d ' % float((np.dot(Y, LR_predictions) + np.dot(1 - Y, 1 - LR_predictions)) / float(Y.size) * 100) +
          '% ' + "(percentage of correctly labelled datapoints)")

    X_assess, Y_assess = layer_sizes_test_case()
    (n_x, n_h, n_y) = layer_sizes(X_assess, Y_assess)
    print("The size of the input layer is: n_x = " + str(n_x))
    print("The size of the hidden layer is: n_h = " + str(n_h))
    print("The size of the output layer is: n_y = " + str(n_y))
        quit()

    print("\n------------Start------------")
    print("We have", len(X), "input features and", len(X[0]), "data samples")
    print("So X.shape=", X.shape, "and Y.shape=", Y.shape)

    plt.figure(figsize=(16, 32))
    # hidden_layer_sizes = [1, 2, 3, 4, 5, 20, 50]
    # for i, n_h in enumerate(hidden_layer_sizes):

    parameters = nn_model(X, Y, n_h, num_iterations)
    plt.subplot(5, 2, 1)
    plt.title('Hidden Layer of size %d with %d iterations' %
              (n_h, num_iterations))

    planarUtils.plot_decision_boundary(lambda x: predict(parameters, x.T), X,
                                       Y)
    print("\n==================================")
    print("Predict")
    print("==================================")
    predictions = predict(parameters, X)

    print("\nCalculate accuracy")
    print(
        "accuracy = float((np.dot(Y, predictions.T) + np.dot(1-Y, 1-predictions.T))/float(Y.size)*100)"
    )
    accuracy = float(
        (np.dot(Y, predictions.T) + np.dot(1 - Y, 1 - predictions.T)) /
        float(Y.size) * 100)
    print("\nAccuracy for {} hidden units: {} %".format(n_h, accuracy))
Example #20
# Given trained parameters, predict the classification of each example in X
def predict(parameters, X):

    A2, cache = forward_propagation(X, parameters)
    predictions = A2 > 0.5
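    # note: A2 > 0.5 yields a boolean array; NumPy treats True/False as 1/0,
    # so the dot-product accuracy below still works. An explicit integer
    # version (illustrative): predictions = (A2 > 0.5).astype(int)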

    return predictions


#parameters = nn_model(X,Y,n_h=4,num_iterations=10000,print_cost=True)

#plot_decision_boundary(lambda x: predict(parameters, x.T), X, Y[0])
#plt.title("Decision Boundary for hidden layer size " + str(4))

# This may take about 2 minutes to run

plt.figure(figsize=(16, 32))
hidden_layer_sizes = [1, 2, 3, 4, 5, 20, 50]
for i, n_h in enumerate(hidden_layer_sizes):
    plt.subplot(5, 2, i + 1)
    plt.title('Hidden Layer of size %d' % n_h)
    parameters = nn_model(X, Y, n_h, num_iterations=5000)
    plot_decision_boundary(lambda x: predict(parameters, x.T), X, Y[0])
    predictions = predict(parameters, X)
    accuracy = float(
        (np.dot(Y, predictions.T) + np.dot(1 - Y, 1 - predictions.T)) /
        float(Y.size) * 100)
    print("Accuracy for {} hidden units: {} %".format(n_h, accuracy))

plt.show()
shape_X = X.shape
shape_Y = Y.shape
m = shape_X[1]  # training set size


print ('The shape of X is: ' + str(shape_X))
print ('The shape of Y is: ' + str(shape_Y))
print ('I have m = %d training examples!' % (m))

# Train the logistic regression classifier
clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(X.T, Y.T)

# Plot the decision boundary for logistic regression
plot_decision_boundary(lambda x: clf.predict(x), X, Y)
plt.title("Logistic Regression")                                        ###See pic2 for output plot

# Print accuracy
LR_predictions = clf.predict(X.T)
print ('Accuracy of logistic regression: %d ' % float((np.dot(Y,LR_predictions) + np.dot(1-Y,1-LR_predictions))/float(Y.size)*100) +
       '% ' + "(percentage of correctly labelled datapoints)")

######################################################################################################################################

def layer_sizes(X, Y):
    """
    Arguments:
    X -- input dataset of shape (input size, number of examples)
    Y -- labels of shape (output size, number of examples)
    """
Example #22
    for i in range(0, num_iterations):
        A2, cache = forward_prop(X, param)
        c = cost(A2, Y)
        grads = back_prop(param, cache, X, Y)
        param = update(param, grads)

        if print_cost and i % 1000 == 0:
            print('Cost is ' + str(c))

    return param
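
# forward_prop/cost/back_prop/update above come from the surrounding file;
# a minimal cross-entropy sketch consistent with the cost(A2, Y) call
# (the signature is assumed; A2 is the sigmoid output of shape (1, m)):
def cost_sketch(A2, Y):
    m = Y.shape[1]
    return float(-np.sum(Y * np.log(A2) + (1 - Y) * np.log(1 - A2)) / m)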


# Visual
parameters = nn_model(X, Y, 4, num_iterations=50000, print_cost=True)
# Plot the decision boundary
plot_decision_boundary(lambda x: predict(parameters, x.T).ravel(), X,
                       Y.ravel())
predictions = predict(parameters, X)
accuracy = float(
    (np.dot(Y, predictions.T) + np.dot(1 - Y, 1 - predictions.T)) /
    float(Y.size) * 100)
print("Accuracy for {} hidden units: {} %".format(4, accuracy))
plt.title("Decision Boundary for hidden layer size " + str(4))
plt.show()

# # Net Size
# plt.figure(figsize=(16, 32))
# hidden_layer_sizes = [1, 2, 3, 4, 5, 20, 50]
# for i, n_h in enumerate(hidden_layer_sizes):
#     plt.subplot(5, 2, i+1)
#     plt.title('Hidden Layer of size %d' % n_h)
#     parameters = nn_model(X, Y, n_h, num_iterations = 5000)
def predict(W1, b1, W2, b2, X):
    """
    Arguments:
    X -- input data of size (n_x, m)

    Returns:
    predictions -- vector of predictions of our model (red: 0 / blue: 1)
    """
    _, _, _, A2 = forward_propagation(X, b1, W1, b2, W2)
    Y_hat = np.round(A2)

    return Y_hat


W1, b1, W2, b2 = nn_model(X, Y, n_h = 4, num_iterations=10000, print_cost=True)
# Plot the decision boundary
plot_decision_boundary(lambda x: predict(W1, b1, W2, b2, x.T), X, Y)
plt.title("Decision Boundary for hidden layer size " + str(4))
plt.show()

predictions = predict(W1, b1, W2, b2, X)
# print(predictions)
print ('Accuracy: %d' % float((np.dot(Y, predictions.T) + np.dot(1 - Y, 1 - predictions.T)) / float(Y.size) * 100) + '%')

# Accuracy is really high compared to Logistic Regression. The model has learnt the leaf patterns of the flower!
# Neural networks are able to learn even highly non-linear decision boundaries, unlike logistic regression.

# plt.figure(figsize=(16, 32))
Example #24
X, Y = load_planar_dataset()

print(X.shape)

print(Y.shape)

Y = np.squeeze(Y)
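
# note: 'plot' below is assumed to be matplotlib.pyplot imported under that
# alias elsewhere in this file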

# Visualize the data:
plot.scatter(X[0, :], X[1, :], c=Y, s=40, cmap=plot.cm.Spectral)

plot.show()

clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(X.T, np.ravel(Y.T))

# Plot the decision boundary for logistic regression

plot_decision_boundary(lambda x: clf.predict(x), X, Y)
plot.title("Logistic Regression")

plot.show()

# Print accuracy
LR_predictions = clf.predict(X.T)
print('Accuracy of logistic regression: %d ' %
      float((np.dot(Y, LR_predictions) + np.dot(1 - Y, 1 - LR_predictions)) /
            float(Y.size) * 100) + '% ' +
      "(percentage of correctly labelled datapoints)")
Example #25
shape_X = X.shape
shape_Y = Y.shape
m = shape_Y[1]

print('The shape of X is: ' + str(shape_X))
print('The shape of Y is: ' + str(shape_Y))
print('I have m = %d training examples!' % (m))

#================================Simple Logistic Regression================================

# Train the logistic regression classifier
clf = sklearn.linear_model.LogisticRegressionCV()  # uses cross-validation to pick the regularization strength
clf.fit(X.T, Y.T)

# Plot the decision boundary for logistic regression
plot_decision_boundary(lambda x: clf.predict(x), X,
                       Y)  # decision-boundary plotting helper from planar_utils
plt.title("Logistic Regression")
plt.show()

# Print accuracy
LR_predictions = clf.predict(X.T)
print('Accuracy of logistic regression: %d ' %
      float((np.dot(Y, LR_predictions) + np.dot(1 - Y, 1 - LR_predictions)) /
            float(Y.size) * 100) + '% ' +
      "(percentage of correctly labelled datapoints)")


#===========================Defining the neural network structure==========================
def layer_sizes(X, Y):
    """
    Arguments:
Example #26
shape_X = X.shape
shape_Y = Y.shape
m = shape_X[1]  # training set size

print ('The shape of X is: ' + str(shape_X))
print ('The shape of Y is: ' + str(shape_Y))
print ('I have m = %d training examples!' % (m))

# ========================================================
# Simple Logistic Regression

# Train the logistic regression classifier
clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(X.T, Y.T)

# Plot the decision boundary for logistic regression
plot_decision_boundary(lambda x: clf.predict(x), X, Y)
plt.title("Logistic Regression")

# Print accuracy
LR_predictions = clf.predict(X.T)
print ('Accuracy of logistic regression: %d ' % float(
    (np.dot(Y, LR_predictions) + np.dot(1 - Y, 1 - LR_predictions)) / float(Y.size) * 100) +
       '% ' + "(percentage of correctly labelled datapoints)")
# load the data
X, Y = load_planar_dataset()

# plot the data
plt.scatter(X[0, :], X[1, :], c=Y, s=40, cmap=plt.cm.Spectral)

# train the logistic regression classifier
clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(X.T, Y.T)
Example #27
#plt.scatter(X[0,:], X[1,:], s = 40,c = Y, cmap = plt.cm.Spectral);
plt.scatter(X[0, :], X[1, :], c=np.reshape(Y, -1), s=40, cmap=plt.cm.Spectral)

# Making sense of the dataset
# Calculating the number of training examples

m = X.shape[1]
print("There are %s training examples" % (m))
print("Shape of X:", X.shape)
print("Shape of Y:", Y.shape)

# Test the performance of logistic regression on this dataset

logistic = sklearn.linear_model.LogisticRegressionCV()
logistic.fit(X.T, Y.T)

# Plot the flower dataset with the classification output
model = lambda x: logistic.predict(x)
plot_decision_boundary(model, X, Y)
plt.title("Logistic Regression")

LR_predictions = logistic.predict(X.T)

print('Accuracy of logistic regression: %d ' %
      float((np.dot(Y, LR_predictions) + np.dot(1 - Y, 1 - LR_predictions)) /
            float(Y.size) * 100) + '% ' +
      "(percentage of correctly labelled datapoints)")
# Accuracy = (TP + TN) / (TP + TN + FP + FN),
# where TP = true positive, TN = true negative, FP = false positive, FN = false negative
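
# a small illustrative sketch (names not from the original) computing the
# same accuracy from the four confusion counts described above:
y_true = np.ravel(Y)
TP = int(np.sum((LR_predictions == 1) & (y_true == 1)))
TN = int(np.sum((LR_predictions == 0) & (y_true == 0)))
FP = int(np.sum((LR_predictions == 1) & (y_true == 0)))
FN = int(np.sum((LR_predictions == 0) & (y_true == 1)))
print('Accuracy from counts: %.1f %%' % (100.0 * (TP + TN) / (TP + TN + FP + FN)))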