def reproduce(self, parent):
    '''Take the best brain from the last generation, cross it over and mutate it
    to prepare the snake's children.
    The crossover function crosses two matrices randomly (each value has a 50%
    chance of coming from either parent); mutation is then applied with a
    probability equal to the mutation rate.
    '''
    child = NeuralNetwork(inp_nodes=parent.inp_nodes,
                          hid_nodes=parent.hid_nodes,
                          out_nodes=parent.out_nodes)

    child.weigths_input = crossover(child.weigths_input, parent.weigths_input, self.crossover_rate)
    child.weights_hidden = crossover(child.weights_hidden, parent.weights_hidden, self.crossover_rate)
    child.weigths_output = crossover(child.weigths_output, parent.weigths_output, self.crossover_rate)

    # no crossover
    # child.weigths_input = parent.weigths_input[:]
    # child.weights_hidden = parent.weights_hidden[:]
    # child.weigths_output = parent.weigths_output[:]

    child.weigths_input = mutate(self.mutation_rate, child.weigths_input)
    child.weights_hidden = mutate(self.mutation_rate, child.weights_hidden)
    child.weigths_output = mutate(self.mutation_rate, child.weigths_output)

    return child
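# The crossover() and mutate() helpers called above are not shown here. A
# minimal NumPy sketch matching the behaviour described in the docstring
# (per-value crossover with probability `rate`, per-value mutation with
# probability `rate`) could look like the following. The signatures follow the
# calls above, but the internals (boolean masks, Gaussian perturbation) are
# assumptions, not the project's actual code.
import numpy as np

def crossover(a, b, rate):
    # For each weight, take the value from `b` with probability `rate`
    # (the docstring describes a 50% chance, i.e. rate=0.5).
    mask = np.random.rand(*np.shape(a)) < rate
    return np.where(mask, b, a)

def mutate(rate, weights):
    # Perturb each weight with probability `rate`; the noise scale is assumed.
    mask = np.random.rand(*np.shape(weights)) < rate
    noise = np.random.randn(*np.shape(weights)) * 0.1
    return np.where(mask, weights + noise, weights)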
def __init__(self, file_path):
    self.__file_path = file_path
    self.__network = None
    self.__train_data = []
    self.__train_class = []
    self.__test_data = []
    self.__test_class = []
    self.__outs = []
    self.__gradients = []
    self.__bias = []
    self.__learning_rate = 0.023

    self.__network = NeuralNetwork(self.__learning_rate)
    # Allocate per-layer output, gradient and bias buffers from the network architecture
    for i in range(len(self.__network.return_arch())):
        self.__outs.append(np.zeros([self.__network.return_arch()[i]]))
    for i in range(1, len(self.__network.return_arch())):
        self.__gradients.append(np.zeros(self.__network.return_arch()[i]))
    for i in range(1, len(self.__network.return_arch())):
        self.__bias.append(np.random.randn(self.__network.return_arch()[i]) * 0.1)

    if self.__check_file():
        data_frame = pd.read_csv(self.__file_path, header=None)[:100]
        data_frame = data_frame.sample(frac=1).reset_index(drop=True)
        classes = data_frame[4]
        clss = data_frame[4][0]
        # First 80 shuffled rows are used for training, the remaining rows for testing
        y = data_frame.iloc[:80, 0:3].values
        X = data_frame.iloc[80:, 0:3].values
        for row, cls in zip(y, classes[:80]):
            self.__train_data.append(list(row))
            # Binary target: 1 if the row's class matches the first row's class
            self.__train_class.append([int(cls == clss)])
        self.__data = self.__train_data
        self.__classes = self.__train_class
        self.__train_data = np.matrix(self.__train_data)
        self.__train_class = np.matrix(self.__train_class)
        for row, cls in zip(X, classes[80:]):
            self.__test_data.append(list(row))
            self.__test_class.append(int(cls == clss))
        # self.__network = NeuralNetwork(0.1)
        self.__start(250, data_frame[80:])
    else:
        print("TypeError; file must have .csv type")
        exit(1)
def __init__(self, replay=False, runId=0, load_pop=False, selection_rate=0.1,
             mutation_rate=0.01, population_size=100, random_weight_range=1.0,
             max_generations=100, show_graphics=True, save_population=False,
             save_best=False, save_graph=True, games_to_show=25,
             grid_count=30, grid_size=10):
    # Set parameters
    self.selection_rate = selection_rate
    self.mutation_rate = mutation_rate
    self.population_size = population_size
    self.random_weight_range = random_weight_range
    self.max_generations = max_generations
    self.show_graphics = show_graphics
    self.save_population = save_population
    self.save_best = save_best
    self.save_graph = save_graph
    self.games_to_show = games_to_show
    self.grid_count = grid_count
    self.grid_size = grid_size

    # Get the initial neural network model
    # TODO: Have option to read from a file
    self.model = NeuralNetwork(input_shape=16, action_space=4).model

    pg.init()

    # Read the overall run counter from the run file
    runFile = open("./runs/run.txt", 'r')
    self.overallRun = int(runFile.read().strip())

    if not replay:
        if load_pop:
            population = self.loadPopulation(runId)
            if not population:
                print("ERROR, POPULATION FILE NOT FOUND")
                return
        else:
            # Create the initial population
            population = self.createInitialPopulation()
        runFile.close()

        self.runGenetics(population)

        # Increment the run counter for the next run
        runFile = open("./runs/run.txt", 'w')
        runFile.write(str(self.overallRun + 1))
        runFile.close()
    else:
        print("REPLAY NOT IMPLEMENTED YET")
import numpy
from numpy import genfromtxt
from flask import Flask, request, abort, jsonify

from net import NeuralNetwork

# Network parameters
input_nodes = 20
hidden_nodes = 100
output_nodes = 1
learning_rate = 0.2  # Make this .02 if you reduce epochs
epochs = 10
score_threshold = 0.09  # Anything above this is considered 'yes', customer is likely to subscribe

n = NeuralNetwork(input_nodes, hidden_nodes, output_nodes, learning_rate)

training_data_list = genfromtxt('./banking-train.csv', delimiter=',',
                                skip_header=1, dtype=str)

# Map well-known strings to numbers
job_categories = [
    'admin.', 'blue-collar', 'entrepreneur', 'housemaid', 'management',
    'retired', 'self-employed', 'services', 'student', 'technician',
    'unemployed', 'unknown'
]
marital_categories = ['divorced', 'married', 'single', 'unknown']
education_categories = [
    'basic.4y', 'basic.6y', 'basic.9y', 'high.school', 'illiterate',
    'professional.course', 'university.degree', 'unknown'
]
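# The snippet above only declares the category lists; how a CSV record is
# turned into the 20 input values is not shown. Below is a minimal sketch of
# one plausible scheme (index lookup normalized to [0, 1]) for the three
# categorical fields. The encode_record() helper and this exact encoding are
# assumptions for illustration, not the project's actual preprocessing, which
# presumably expands each record into all 20 inputs.
def encode_record(job, marital, education):
    return [
        job_categories.index(job) / float(len(job_categories) - 1),
        marital_categories.index(marital) / float(len(marital_categories) - 1),
        education_categories.index(education) / float(len(education_categories) - 1),
    ]

# Example usage:
# encode_record('student', 'single', 'high.school')
# -> [0.7272..., 0.6666..., 0.4285...]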
#!/usr/bin/python3
from parse import Parser
from preprocess import Preprocess
from net import NeuralNetwork

if __name__ == '__main__':
    p = Parser()
    # p.readCSV("xAPI-Edu-Data.csv")
    p.splitTrainTest("xAPI-Edu-Data.csv")

    # pre = Preprocess()
    # pre.createArrs("trainData.txt", "testData.txt")

    nn = NeuralNetwork()
    # nn.train_neural_network()
    nn.test_neural_network()
# Toy example for predicting the number of ones in a given binary array
# e.g.: [0,1,1,0,0,1,0] -> 3
import numpy as np
from matplotlib import pyplot as plt
from random import randint, seed

from net import NeuralNetwork

seed(3)

N = 20
nn = NeuralNetwork(N, 1)

n_iter = 10000
losses = []
for i in range(n_iter):
    y = np.array([randint(0, N)])
    x = np.concatenate([np.ones([y[0]]), np.zeros([N - y[0]])], axis=0)
    np.random.shuffle(x)

    nn.forward(x, y)
    nn.backprop()

    loss = pow(y - nn.output, 2)[0][0]
    losses.append(loss)
    print(i, loss, "actual: {} | predict: {}".format(y[0], nn.output[0]))

y = np.array([5])
x = np.concatenate([np.ones([1, y[0]]), np.zeros([1, N - y[0]])], axis=1)
nn.forward(x, y)
print("Actual output: {} | Predicted output: {}".format(y, nn.output))

plt.plot(range(len(losses)), losses)
plt.show()
def create_nn_array(sizes):
    # Build one network per entry in `sizes`; every network currently uses the
    # same fixed layer sizes (13-13-3), so only the length of `sizes` matters.
    neural_nets = [
        NeuralNetwork(input_size=13, hidden_size=13, output_size=3)
        for _ in range(len(sizes))
    ]
    return neural_nets
import numpy as np
import matplotlib.pyplot as plt

from dataloader import parse_dataset
from genetic import EPOCHS
from net import NeuralNetwork

model = NeuralNetwork(input_size=13, hidden_size=5, output_size=3)
X_train, Y_train, X_test, Y_test = parse_dataset()

losses_hist = []
accuracy = []
for epoch in range(EPOCHS):
    model.train(X_train, Y_train)
    output = model.predict(X_test)
    loss = np.square(np.argmax(output, axis=1) - Y_test).mean()
    acc = len(np.where(np.argmax(output, axis=1) == Y_test)[0]) / len(Y_test)
    losses_hist.append(loss)
    accuracy.append(acc)

plt.plot(range(len(losses_hist)), losses_hist)
plt.title("Loss using all training data")
plt.xlabel("Epochs")
plt.ylabel("Loss")
plt.show()
        # (inside generate_sample: keep drawing random corners until a
        # rectangle of at least MIN_RECT_SIZE is found, then stamp it onto the canvas)
        left = randint(0, SIZE)
        top = randint(0, SIZE)
        right = randint(0, SIZE)
        bottom = randint(0, SIZE)
        if (right - left >= MIN_RECT_SIZE and bottom - top >= MIN_RECT_SIZE):
            rect_found = True
            for x in range(left, right):
                for y in range(top, bottom):
                    canvas[y][x] += 1  # canvas[y][x] = 1 for combined boxes
    return (canvas, n_rect)


canvas, y = generate_sample(1)
nn = NeuralNetwork(canvas.size, 1)
nn.set_learning_rate(0.01)

n_iter = 30000
losses = []
for i in range(n_iter):
    n_rect = randint(0, MAX_N_RECT)
    canvas, y = generate_sample(n_rect)
    x = np.concatenate(canvas)
    y = np.array([y])

    nn.forward(x, y)
    nn.backprop()

    loss = pow(y - nn.output, 2)[0][0]
    losses.append(loss)
    print("{}|\tloss: {} \tactual/predicted:\t{} / {}".format(
        i, loss, y[0], nn.output[0][0]))
def first_generation(self):
    '''Create the first generation of snakes.'''
    self.cpus = []
    for i in range(self.population_size):
        self.cpus.append(self.new_snake(NeuralNetwork(24, 8, 4)))