Example no. 1
def b_3(plot=False):
    units = [1, 2, 3, 10, 20, 40]
    lrs = [0.09, 0.09, 0.1, 0.1, 0.1, 0.01]
    # lrs = [0.1, 0.1, 0.1, 0.1, 0.1, 0.1]
    for unit, lr in zip(units, lrs):
        print("\nNeural_Network")
        model = Neural_Network(len(train_data[0]), [unit],
                               activation="sigmoid")
        print(model)
        model.train(train_data,
                    train_labels,
                    max_iter=10000,
                    eeta=lr,
                    batch_size=len(train_data),
                    threshold=1e-6,
                    decay=False)
        pred = model.predict(train_data)
        train_acc = accuracy_score(train_labels, pred) * 100
        print("Train Set Accuracy: ", train_acc)

        pred = model.predict(test_data)
        test_acc = accuracy_score(test_labels, pred) * 100
        print("Test Set Accuracy: ", test_acc)
        if plot:
            plot_decision_boundary(
                model.predict, np.array(test_data), np.array(test_labels),
                "Neural_Network Test Set\n Units in Hidden layers: %s\nAccuracy: %f"
                % (str(model.hidden_layer_sizes), test_acc))
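The examples in this collection hand a prediction callable to a plot_decision_boundary helper (or viz.plot_decision_boundary) that is defined elsewhere in the respective sources; its signature even varies between snippets, with some calls passing a title and others an output filename. Purely as an illustration of what such a helper can look like for 2-D inputs, assuming matplotlib and a predictor that returns one label per row:

import numpy as np
import matplotlib.pyplot as plt

def plot_decision_boundary(pred_func, X, y, title=""):
    # Hypothetical sketch, not the original implementation: evaluate the
    # predictor on a dense 2-D grid and overlay the class regions on the data.
    x_min, x_max = X[:, 0].min() - 0.5, X[:, 0].max() + 0.5
    y_min, y_max = X[:, 1].min() - 0.5, X[:, 1].max() + 0.5
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    Z = np.asarray(pred_func(np.c_[xx.ravel(), yy.ravel()])).reshape(xx.shape)
    plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral, alpha=0.6)
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Spectral, edgecolors='k')
    plt.title(title)
    plt.show()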
Example no. 2
def b_2(plot=False, units=[5], eeta=0.1, threshold=1e-6):
    print("\nNeural_Network")
    model = Neural_Network(len(train_data[0]), units, activation="sigmoid")
    print(model)
    model.train(train_data,
                train_labels,
                max_iter=5000,
                eeta=eeta,
                batch_size=len(train_data),
                threshold=threshold,
                decay=False)
    pred = model.predict(train_data)
    train_acc = accuracy_score(train_labels, pred) * 100
    print("Train Set Accuracy: ", train_acc)

    pred = model.predict(test_data)
    test_acc = accuracy_score(test_labels, pred) * 100
    print("Test Set Accuracy: ", test_acc)
    if plot:
        plot_decision_boundary(
            model.predict, np.array(train_data), np.array(train_labels),
            "Neural_Network Train Set\n Units in Hidden layers: %s\nAccuracy: %f"
            % (str(model.hidden_layer_sizes), train_acc))
        plot_decision_boundary(
            model.predict, np.array(test_data), np.array(test_labels),
            "Neural_Network Test Set\n Units in Hidden layers: %s\nAccuracy: %f"
            % (str(model.hidden_layer_sizes), test_acc))
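A typical driver for this experiment, sweeping a few hidden-layer widths (the widths and learning rate below are illustrative, not taken from the original call site), might be:

if __name__ == "__main__":
    for width in [1, 5, 40]:
        b_2(plot=True, units=[width], eeta=0.1, threshold=1e-6)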
Example no. 3
def main():
    train_X = np.loadtxt(
        open("./toy_data/toy_trainX.csv", "rb"), delimiter=",", skiprows=0)
    train_Y = np.loadtxt(
        open("./toy_data/toy_trainY.csv", "rb"),
        delimiter=",",
        skiprows=0,
        dtype=int)
    test_X = np.loadtxt(
        open("./toy_data/toy_testX.csv", "rb"), delimiter=",", skiprows=0)
    test_Y = np.loadtxt(
        open("./toy_data/toy_testY.csv", "rb"),
        delimiter=",",
        skiprows=0,
        dtype=int)

    model = linear_model.LogisticRegression()
    model.fit(train_X, train_Y)
    te_y_predict = model.predict(test_X)
    test_acc = accuracy_score(test_Y, te_y_predict)

    tr_y_predict = model.predict(train_X)
    train_acc = accuracy_score(train_Y, tr_y_predict)

    print('TRAINING SET ACCURACY : ' + str(train_acc))
    print('TESTING SET ACCURACY : ' + str(test_acc))

    viz.plot_decision_boundary(lambda i: model.predict(i), train_X, train_Y)
    viz.plot_decision_boundary(lambda i: model.predict(i), test_X, test_Y)
    return
Example no. 4
def main(X, y):
    global batch_size
    hidden_layer_list = [40]  #only one hidden layer, with 40 neurons
    n_features = X.shape[1]  #number of input neurons
    n_outputs = 1  #number of neurons in the output layer
    batch_size = m

    print('\nRANDOMLY INITIALISING THE NEURAL NETWORK...')
    initialise_network(
        n_features, hidden_layer_list,
        n_outputs)  #this method will initialise the neural network
    print('INITIALISATION DONE!')

    print('\nTRAINING THE NEURAL NETWORK...')
    for iters in range(max_iterations):
        idx = np.random.randint(m, size=batch_size)
        X_new = X[idx, :]
        Y_new = y[idx, :]
        # Y_new = np.array(y_with_2).reshape(m, n_outputs)

        print('\nFORWARD PROPAGATION IN ACTION...')
        A_list = forward_propagate(X_new, m)
        print('FORWARD PROPAGATION DONE!')

        print('\nBACKWARD PROPAGATION IN ACTION...')
        backpropagation(A_list, Y_new)
        print('BACKPROP DONE!')
    print('NEURAL NETWORK TRAINED!\n')

    test_x = np.loadtxt(open("./toy_data/toy_testX.csv", "rb"),
                        delimiter=",",
                        skiprows=0)
    test_y = np.loadtxt(open("./toy_data/toy_testY.csv", "rb"),
                        delimiter=",",
                        skiprows=0,
                        dtype=int)

    print('TRAINING ACCURACY : ' + str(print_accuracy(X, y)))
    print('TESTING ACCURACY : ' + str(print_accuracy(test_x, test_y)))

    #visualising the training decision boundary
    viz.plot_decision_boundary(lambda x: predict(x), X, y)

    #visualising the testing decision boundary
    viz.plot_decision_boundary(lambda x: predict(x), test_x, test_y)

    return
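The helpers used above (initialise_network, forward_propagate, backpropagation, predict, print_accuracy) are defined elsewhere in this source. As an illustration only, print_accuracy could be as small as the sketch below, assuming predict returns one 0/1 label per row:

import numpy as np

def print_accuracy(X, y):
    # Illustrative sketch, not the original implementation: percentage of
    # samples whose predicted label matches the ground-truth label.
    preds = np.asarray(predict(X)).reshape(-1)
    return np.mean(preds == np.asarray(y).reshape(-1)) * 100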
Example no. 5
def b_1(plot=False):
    print("\nLogistic Regression")
    model = LogisticRegression()
    model.fit(train_data, train_labels)
    pred = model.predict(train_data)
    train_acc = accuracy_score(train_labels, pred) * 100
    print("Train Set Accuracy: ", train_acc)

    pred = model.predict(test_data)
    test_acc = accuracy_score(test_labels, pred) * 100
    print("Test Set Accuracy: ", test_acc)
    if plot:
        plot_decision_boundary(
            model.predict, np.array(train_data), np.array(train_labels),
            "LogisticRegression Train Set\n Accuracy: %f" % (train_acc))
        plot_decision_boundary(
            model.predict, np.array(test_data), np.array(test_labels),
            "LogisticRegression Test Set\n Accuracy: %f" % (test_acc))
Example no. 6
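# Fragment of the prediction helper f used in the plot calls below: inside a
# loop over samples k, keep the index of the output unit with the largest
# activation.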
		ans = -1
		ma = -1
		for i in range(layers[numlayers-1]):
			temp = O[numlayers-1][i][k]
			if(temp > ma):
				ma = temp
				ans = i

		res.append(ans)

	res=np.array(res)

	return res		


plot_decision_boundary(f,X,Y,'Train data '+str(layers[:numlayers-1]))
plot_decision_boundary(f,X2,Y2,'Test data '+str(layers[:numlayers-1]))


print("Logistic regression..")
from sklearn import linear_model
logistic=linear_model.LogisticRegression()
logistic.fit(X,Y)

Yp=logistic.predict(X)
ctrain=0
for i in range(m):
	if(Yp[i]==Y[i]):
		ctrain+=1

print("Train data accuracy={}".format(ctrain/m*100))
Example no. 7
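# The fragment below appears to be the body of the forward-pass predictor that
# the plot calls further down hand to plot_decision_boundary; assuming `weights`
# is the globally trained weight list, its missing header would read:
def return_output(X):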
	layers = [0 for i in weights]
	last = len(layers)-1
	for i in range(len(weights)):
		if (i>0):
			layers[i] = sigmoid(np.matmul(weights[i], layers[i-1]))
		else:
			layers[i] = sigmoid(np.matmul(weights[i], X.T))	
	output = layers[last].T
	output = output.reshape(len(output))
	output[output>0.5] = 1
	output[output<=0.5] = 0
	return output

layers_dim = [2, 5, 1]
cf, tr, te, weights = train_network_single_eg(X, y, X_test, y_test, layers_dim, len(X))
plot_decision_boundary(return_output, X, y, 'contour_train_5.png')
plot_decision_boundary(return_output, X_test, y_test, 'contour_test_5.png')
		
# for hidden_layer in [10, 20, 40, 5, 1, 2, 3]:
# 	layers_dim = [2]+[hidden_layer]+[1]
# 	cf, tr, te, weights = train_network_single_eg(X, y, X_test, y_test, layers_dim, len(X))
# 	plot_decision_boundary(return_output, X, y, 'contour_train_'+str(hidden_layer)+'.png')
# 	plot_decision_boundary(return_output, X_test, y_test, 'contour_test_'+str(hidden_layer)+'.png')
	
# 	plt.figure()
# 	x = np.arange(1, len(tr)+1)
# 	plt.plot(x, tr, 'b-', label='Training Acc')
# 	plt.plot(x, te, 'k-', label='Testing Acc')
# 	plt.legend()
# 	plt.savefig('accuracy_'+str(hidden_layer)+'.png')
Example no. 8
lines = f.readlines()  # f: configuration file opened earlier in the script

input_size = int(lines[0][:-1])
hidden_layers_dim = lines[1][1:-2].split(', ')
hidden_layers_dim = list(map(int, hidden_layers_dim))
batch_size = int(lines[2][:-1])
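The slicing above implies that the file behind f begins with three plain-text lines of the following shape (the values here are purely illustrative):

# line 0: "2\n"        -> input_size (an integer)
# line 1: "[5, 10]\n"  -> hidden_layers_dim (a bracketed, comma-separated list)
# line 2: "100\n"      -> batch_size (an integer)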

layers_dim = [input_size] + hidden_layers_dim[:] + [1]  #the trailing 1 is the single output unit

#Toy Dataset
for hidden_layer in [1, 2, 3, 10, 20, 40]:
    layers_dim = [2] + [hidden_layer] + [1]
    print "Starting " + str(layers_dim)
    cf, tr, te, weights = train_network_single_eg(X, y, X_test, y_test,
                                                  layers_dim, len(X))
    print(tr)
    print(te)
    plt.figure()
    x = np.arange(1, len(tr) + 1)
    plt.plot(x, tr, 'b-', label='Training Acc')
    plt.plot(x, te, 'k-', label='Testing Acc')
    plt.legend()
    plt.savefig('1accuracy_' + str(hidden_layer) + '.png')

    plot_decision_boundary(return_output, X, y,
                           'contour_train_' + str(hidden_layer) + '.png')
    plot_decision_boundary(return_output, X_test, y_test,
                           'contour_test_' + str(hidden_layer) + '.png')
Example no. 9
layers_dim = [int(X_mnist.shape[1]), 100, 1]

#ReLU
start = timeit.default_timer()
cf, tr, te, weights = train_network_single_eg(X_mnist,
                                              y_mnist,
                                              X_mnist_test,
                                              y_mnist_test,
                                              layers_dim,
                                              100,
                                              err=1e-14)
stop = timeit.default_timer()
print(tr)
print(te)
print('Time taken - ' + str(stop - start))
plt.figure()
x = np.arange(1, len(tr) + 1)
plt.plot(x, tr, 'b-', label='Training Acc')
plt.plot(x, te, 'k-', label='Testing Acc')
plt.xlabel('# of iterations')
plt.ylabel('Accuracy')
plt.legend()
plt.savefig('accuracy_mnist_relu_14.png')

plt.figure()
plt.plot(x, cf, 'b-', label='Cost Function')
plt.legend()
plt.savefig('costf_mnist_relu_14.png')

plot_decision_boundary(return_output, X, y, 'mnist_relu_db_train.png')
plot_decision_boundary(return_output, X_test, y_test, 'mnist_relu_db_test.png')