Example #1
import numpy as np
import matplotlib.pyplot as plt

# load_planar_dataset, neuralNetwork_model, predict and plot_decision_boundary
# are assumed to be defined in an accompanying helper module (e.g. planar_utils)


def main():

    # set a seed so that the results are consistent
    np.random.seed(1)

    # Load the planar dataset
    X, Y = load_planar_dataset()

    # Visualize the data:
    plt.scatter(X[0, :], X[1, :], c=Y[0], s=40, cmap=plt.cm.Spectral)
    plt.show()

    # Build a model with a n_h-dimensional hidden layer
    parameters = neuralNetwork_model(X,
                                     Y,
                                     n_h=4,
                                     num_iterations=10000,
                                     print_cost=True)

    # Plot the decision boundary
    plot_decision_boundary(lambda x: predict(parameters, x.T), X, Y)
    plt.title("Decision Boundary for hidden layer size " + str(4))
    plt.show()

    # Print accuracy: np.dot(Y, predictions.T) counts correctly predicted 1s,
    # np.dot(1 - Y, 1 - predictions.T) counts correctly predicted 0s
    predictions = predict(parameters, X)
    print('Accuracy: %d' %
          float((np.dot(Y, predictions.T) + np.dot(1 - Y, 1 - predictions.T)) /
                float(Y.size) * 100) + '%')

    # Test the accuracy of the model with a range of different layers
    # This may take a while...

    plt.figure(figsize=(16, 32))
    hidden_layer_sizes = [1, 2, 3, 4, 5, 20, 50]

    for i, n_h in enumerate(hidden_layer_sizes):
        plt.subplot(5, 2, i + 1)
        plt.title('Hidden Layer of size %d' % n_h)
        parameters = neuralNetwork_model(X, Y, n_h, num_iterations=5000)
        plot_decision_boundary(lambda x: predict(parameters, x.T), X, Y)
        predictions = predict(parameters, X)
        accuracy = float(
            (np.dot(Y, predictions.T) + np.dot(1 - Y, 1 - predictions.T)) /
            float(Y.size) * 100)
        print("Accuracy for {} hidden units: {} %".format(n_h, accuracy))
Example #2
from sklearn import tree
import os
import utilities as util
import pandas as pd
import numpy as np

os.chdir('E:/decision-trees')

tamu = pd.read_csv("tamu.txt", sep=' ', header=None)

# explore the dataframe (shape must be printed explicitly in a script)
print(tamu.shape)
tamu.info()

X = np.array(tamu[[1, 0]])  # features: columns 1 and 0
y = np.array(tamu[2])       # labels: column 2

util.plot_data(X, y)

tree_estimator = tree.DecisionTreeClassifier(random_state=2017, max_depth=1)
tree_estimator.fit(X, y)
util.plot_decision_boundary(lambda x: tree_estimator.predict(x), X, y)
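
graphviz can also render the fitted tree directly; a minimal sketch using sklearn's export_graphviz (the feature names and output file name here are illustrative):

from sklearn import tree
import graphviz

dot_data = tree.export_graphviz(tree_estimator, out_file=None,
                                feature_names=['col1', 'col0'],
                                filled=True)
graph = graphviz.Source(dot_data)
graph.render('tamu_tree')  # writes tamu_tree.pdf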
Example #3
import graphviz
import pydot
from keras.models import Sequential
from keras.layers import Dense
from sklearn.datasets import make_classification
# plot_loss_accuracy, plot_decision_boundary and plot_confusion_matrix
# are assumed to be local plotting helpers

# Monkey-patch so older Keras versions can locate Graphviz via pydot
pydot.find_graphviz = lambda: True

x, y = make_classification(n_samples=100,
                           n_informative=2,
                           n_features=2,
                           n_redundant=0,
                           n_clusters_per_class=1,
                           random_state=7)

print(y)  # inspect the generated labels

model = Sequential()
model.add(Dense(units=1, input_shape=(2, ), activation='sigmoid'))

#sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
#model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])

model.compile(optimizer='sgd',
              loss='binary_crossentropy',
              metrics=['accuracy'])
history = model.fit(x=x, y=y, verbose=0, epochs=200)

print(model.get_weights())
model.summary()  # summary() prints itself and returns None
plot_loss_accuracy(history)
plot_decision_boundary(lambda x: model.predict(x), x, y)

y_pred = model.predict_classes(x, verbose=0)

plot_confusion_matrix(model, x, y)
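
plot_loss_accuracy is a local helper rather than a Keras function; one plausible implementation, assuming history comes from model.fit with metrics=['accuracy']:

import matplotlib.pyplot as plt

def plot_loss_accuracy(history):
    # history.history maps metric names to per-epoch values;
    # older Keras versions use the key 'acc', newer ones 'accuracy'
    acc_key = 'acc' if 'acc' in history.history else 'accuracy'
    fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 4))
    ax1.plot(history.history['loss'])
    ax1.set_title('Loss')
    ax1.set_xlabel('Epoch')
    ax2.plot(history.history[acc_key])
    ax2.set_title('Accuracy')
    ax2.set_xlabel('Epoch')
    plt.show()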
Example #4
import graphviz
import pydot
import pandas as pd
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.utils import np_utils

# Monkey-patch so older Keras versions can locate Graphviz via pydot
pydot.find_graphviz = lambda: True

# read the sample data ("input" would shadow the built-in, so use "data")
data = pd.read_csv("sample1.csv")

X_train = data.iloc[:, 1:3].values   # .as_matrix() was removed from pandas
y_train = data["Output"].values

model1 = Sequential()
model1.add(Dense(units=1, input_shape=(2,), activation='sigmoid'))

#sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
#model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])

model1.compile(optimizer='sgd', loss='binary_crossentropy', metrics=['accuracy'])
history1 = model1.fit(x=X_train, y=y_train, verbose=0, epochs=1)

print(model1.get_weights())
plot_loss_accuracy(history1)
plot_decision_boundary(lambda x: model1.predict(x), X_train, y_train)

y_pred = model1.predict_classes(X_train, verbose=0)

plot_confusion_matrix(model1, X_train, y_train)
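
plot_confusion_matrix is likewise a local helper that takes the model itself; a sketch under that assumption, thresholding the sigmoid output at 0.5:

import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix

def plot_confusion_matrix(model, X, y):
    # Hard class labels from the probability output
    y_pred = (model.predict(X) > 0.5).astype(int).ravel()
    cm = confusion_matrix(y, y_pred)
    plt.matshow(cm, cmap=plt.cm.Blues)
    plt.colorbar()
    plt.xlabel('Predicted')
    plt.ylabel('Actual')
    plt.show()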
Example #5
from keras.utils import plot_model
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import Adam
from sklearn.datasets import make_circles
# plot_data, plot_loss_accuracy, plot_decision_boundary and
# plot_confusion_matrix are assumed to be local plotting helpers

X, y = make_circles(n_samples=1000, noise=0.05, factor=0.3, random_state=0)
#X, y = make_moons(n_samples=1000, noise=0.05, random_state=0)
plot_data(X, y)

# single-perceptron model for binary classification
model1 = Sequential()
model1.add(Dense(1, input_shape=(2, ), activation='sigmoid'))

model1.compile('adam', 'binary_crossentropy', metrics=['accuracy'])
plot_model(model1, show_shapes=True, to_file='model1.png')

history1 = model1.fit(X, y, verbose=0, epochs=100)
plot_loss_accuracy(history1)
plot_decision_boundary(lambda x: model1.predict(x), X, y)

y_pred = model1.predict_classes(X, verbose=0)
plot_confusion_matrix(model1, X, y)

#mlp model for binary classification
model2 = Sequential()
model2.add(Dense(4, input_shape=(2, ), activation='tanh'))
model2.add(Dense(2, activation='tanh'))
model2.add(Dense(1, activation='sigmoid'))

model2.compile(Adam(lr=0.01), 'binary_crossentropy', metrics=['accuracy'])
plot_model(model2, show_shapes=True, to_file='model2.png')

history2 = model2.fit(X, y, verbose=0, epochs=50)
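
The example stops right after training model2; evaluating it the same way as model1 would look like this (reusing the helpers above):

plot_loss_accuracy(history2)
plot_decision_boundary(lambda x: model2.predict(x), X, y)

# Overall loss and accuracy on the training data
loss, acc = model2.evaluate(X, y, verbose=0)
print('loss: {:.4f}, accuracy: {:.4f}'.format(loss, acc))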
Example #6
import numpy as np
import matplotlib.pyplot as plt
import sklearn.linear_model

# X, Y and the plot_decision_boundary helper are assumed to be defined
# earlier in the full script (e.g. via load_planar_dataset() and a helper
# like the one sketched under Example #1); X has shape (2, m), Y shape (1, m)

# Visualize the data
plt.scatter(X[0, :], X[1, :], c=Y.ravel(), s=40, cmap=plt.cm.Spectral)
#plt.show()


##---------------------------------------------------------
# LOGISTIC REGRESSION
##---------------------------------------------------------

# Train a logistic regression model to fit the dataset
clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(X.T, Y.T.ravel())  # sklearn expects a 1-D label array

# Plot the dataset with the classification boundary found
# through logistic regression
plot_decision_boundary(lambda x: clf.predict(x), X, Y)
plt.title("Logistic Regression")
#plt.show()
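
# A natural next step (not in the original snippet): print the accuracy of
# the logistic regression model, in the same style as Example #1
LR_predictions = clf.predict(X.T)
print('Accuracy of logistic regression: %d %%' %
      float((np.dot(Y, LR_predictions) + np.dot(1 - Y, 1 - LR_predictions)) /
            float(Y.size) * 100))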


##---------------------------------------------------------
# SETUP NEURAL NETWORK STRUCTURE
##---------------------------------------------------------

def layer_sizes(X, Y):
    """
    Arguments:
        X -- input dataset of shape (input size, number of examples)
        Y -- labels of shape (output size, number of examples)
    Returns:
        n_x -- the size of the input layer
        n_h -- the size of the hidden layer
        n_y -- the size of the output layer
    """
    n_x = np.shape(X)[0]  # size of input layer