Example 1
    def reproduce(self, parent):
        '''
        Take the best brain from the last generation, cross it over and mutate
        it to produce children for the next generation of snakes.
        The crossover function mixes two weight matrices randomly (each value
        has a 50% chance of coming from either parent).
        Mutation is then applied with a probability equal to the mutation rate.
        '''
        child = NeuralNetwork(inp_nodes=parent.inp_nodes,
                              hid_nodes=parent.hid_nodes,
                              out_nodes=parent.out_nodes)

        child.weigths_input = crossover(child.weigths_input,
                                        parent.weigths_input,
                                        self.crossover_rate)
        child.weights_hidden = crossover(child.weights_hidden,
                                         parent.weights_hidden,
                                         self.crossover_rate)
        child.weigths_output = crossover(child.weigths_output,
                                         parent.weigths_output,
                                         self.crossover_rate)

        #no crossover
        #child.weigths_input = parent.weigths_input[:]
        #child.weights_hidden = parent.weights_hidden[:]
        #child.weigths_output = parent.weigths_output[:]

        child.weigths_input = mutate(self.mutation_rate, child.weigths_input)
        child.weights_hidden = mutate(self.mutation_rate, child.weights_hidden)
        child.weigths_output = mutate(self.mutation_rate, child.weigths_output)

        return child
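The crossover and mutate helpers called above are defined elsewhere in this project. Going only by the docstring (a 50% per-value mix of two weight matrices, then per-value mutation with probability equal to the mutation rate), a minimal NumPy sketch could look like the following; the exact signatures and the mutation range of [-1, 1] are assumptions, not the project's actual implementation.

import numpy as np

def crossover(child_weights, parent_weights, crossover_rate):
    # Per-value mix of two weight matrices; the docstring describes a 50%
    # chance per value, so crossover_rate is presumably 0.5 here.
    child_weights = np.asarray(child_weights, dtype=float)
    parent_weights = np.asarray(parent_weights, dtype=float)
    mask = np.random.rand(*child_weights.shape) < crossover_rate
    return np.where(mask, parent_weights, child_weights)

def mutate(mutation_rate, weights):
    # Replace each weight with a fresh random value with probability mutation_rate.
    weights = np.array(weights, dtype=float)
    mask = np.random.rand(*weights.shape) < mutation_rate
    weights[mask] = np.random.uniform(-1.0, 1.0, size=int(mask.sum()))
    return weights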
Example 4
class Genetics:
    # Parameters for controlling the genetics
    # Current iteration of the genetics
    run = 0
    # Current generation
    generation = 0
    # Number of individuals that will be selected to breed (default = 0.1)
    selection_rate = 0.1
    # Chance that a gene will mutate (default = 0.01)
    mutation_rate = 0.01
    # Size of the population (default = 100)
    population_size = 100
    # Range of weights (default = 1.0)
    random_weight_range = 1.0
    # Number of generations to run (default = 100)
    max_generations = 100
    # Display the graphics or not (default = True)
    show_graphics = True
    # If true, will save the last generation that can be loaded and started from later (default = False)
    save_population = False
    # If true, will save the best individual from every generation (default = False)
    save_best = False
    # If true, will save the graph at the end to a png (default = True)
    save_graph = True
    # List that stores the average score of every generation
    generationScores = []
    # Generation max scores
    generationMaxScores = []

    def __init__(self, replay=False, runId=0, load_pop=False, selection_rate=0.1, mutation_rate=0.01, population_size=100,
                 random_weight_range=1.0, max_generations=100, show_graphics=True, save_population=False, save_best=False,
                 save_graph=True, games_to_show=25, grid_count=30, grid_size=10):
        # Set parameters
        self.selection_rate = selection_rate
        self.mutation_rate = mutation_rate
        self.population_size = population_size
        self.random_weight_range = random_weight_range
        self.max_generations = max_generations
        self.show_graphics = show_graphics
        self.save_population = save_population
        self.save_best = save_best
        self.save_graph = save_graph
        self.games_to_show = games_to_show
        self.grid_count = grid_count
        self.grid_size = grid_size

        # Get the initial neural network model
        # TODO: Have option to read from a file
        self.model = NeuralNetwork(input_shape=16, action_space=4).model
        pg.init()
        # Read the current overall run number from ./runs/run.txt
        runFile = open("./runs/run.txt", 'r')
        self.overallRun = int(runFile.read().strip())
        if not replay:
            if load_pop:
                population = self.loadPopulation(runId)
                if not population:
                    print("ERROR, POPULATION FILE NOT FOUND")
                    return
            else:
                population = self.createInitialPopulation()
            runFile.close()
            # Run the genetic algorithm on this population
            self.runGenetics(population)
            runFile = open("./runs/run.txt", 'w')
            runFile.write(str(self.overallRun + 1))
            runFile.close()
        else:
            print("REPLAY NOT IMPLEMENTED YET")
            # self.model = load_model('./runs/run{}/best/generation{}.h5'.format(runId, generationId))
            # self.replay(self.model)

    def createInitialPopulation(self):
        """
        Creates the initial population for the model to work off of
        """

        # Will be a list of weights
        population = []
        # Initial weights from the model
        initialWeights = self.model.get_weights()
        # Randomly set weight values
        for i in range(0, self.population_size):
            # 'individual' aliases initialWeights; the deepcopy below snapshots
            # the randomized values for this member of the population
            individual = initialWeights
            for a in range(0, len(initialWeights)):
                for b in range(0, len(initialWeights[a])):
                    for c in range(0, len(initialWeights[a][b])):
                        initialWeights[a][b][c] = self.getRandomWeight()
            population.append(copy.deepcopy(individual))

        # -------Used only for keras model
        # for i in range(0, self.population_size):
        #     individual = initialWeights
        #     for a in range(0, len(initialWeights)):
        #         a_layer = initialWeights[a]
        #         for b in range(0, len(a_layer)):
        #             b_layer = a_layer[b]
        #             if not isinstance(b_layer, np.ndarray):
        #                 initialWeights[a][b] = self.getRandomWeight()
        #                 #initialWeights[a][b] = test
        #                 continue
        #             for c in range(0, len(b_layer)):
        #                 c_layer = b_layer[c]
        #                 if not isinstance(c_layer, np.ndarray):
        #                     initialWeights[a][b][c] = self.getRandomWeight()
        #                     #initialWeights[a][b][c] = test
        #                     continue
        #    population.append(copy.deepcopy(individual))
        return population

    def runGenetics(self, population):
        """
        Runs the simulation
        """
        snake = Snake(True, self.population_size, self.generation, 0,
                      grid_size=self.grid_size, grid_count=self.grid_count, games_to_show=self.games_to_show)
        while self.generation < self.max_generations:
            snake.clear()
            # Scores for all of the populations
            scores = snake.run_generation(population, self.model, self.generation)

            # Run game for all members
            # for i in range(0, self.population_size):
            #     self.model.set_weights(population[i])
            #     scores.update(self.gameCycle(self.model, i))

            print(scores)
            self.generationScores.append(self.average(scores))

            self.generation += 1

            # Kill the bottom 90% of the population
            parents = self.killWeak(population, scores)

            if self.save_best:
                self.saveBest(parents[0])

            # Breed new ones from the top 10% of performers
            newPopulation = self.breedToFull(parents)
            population = newPopulation
            # newPopulation = self.mutate(newPopulation)
            print("Generation: {}".format(self.generation))
        # Ending things
        print(self.generationScores)
        print(self.generationMaxScores)
        x = range(0, self.max_generations)

        fig, ax = plt.subplots()
        ax.plot(x, self.generationScores, x, self.generationMaxScores)
        ax.set(xlabel='generation', ylabel='avg score', title='Generations Over Time')
        ax.grid()
        if self.save_graph:
            fig.savefig("graphs/graph{}.png".format(self.overallRun))
        plt.show()

        self.savePopulation(population)

    def killWeak(self, population, scores):
        sortedScores = sorted(scores.items(), key=operator.itemgetter(1))
        sortedScores.reverse()
        self.generationMaxScores.append(sortedScores[0][1])
        # TODO: Change later to get random amounts of 0's at end
        sortedScores = sortedScores[:int(self.population_size * self.selection_rate)]
        newPopulationIds = []
        for i in range(0, len(sortedScores)):
            newPopulationIds.append(sortedScores[i][0])
        print(newPopulationIds)

        newPopulation = []

        for i in range(0, len(newPopulationIds)):
            newPopulation.append(population[newPopulationIds[i]])
        return newPopulation

    def breedToFull(self, parents):
        newPopulation = copy.deepcopy(parents)
        i = 0
        while len(newPopulation) < self.population_size:
            # print("running: {}".format(i))
            i += 1
            rand1 = random.choice(range(0, int(self.population_size * self.selection_rate)))
            rand2 = rand1
            while rand2 == rand1:
                rand2 = random.choice(range(0, int(self.population_size * self.selection_rate)))
            parent1 = parents[rand1]
            parent2 = parents[rand2]
            if not np.array_equal(parent1, parent2):
                newPopulation.append(self.breed(parent1, parent2))
            else:
                print("same")
                pass

        return newPopulation

    def breed(self, parent1, parent2):
        """
        Breeds two parents together to get a child
        The child gets attributes from both of its parents
        """
        child = copy.deepcopy(parent1)
        # for my model
        for a in range(len(child)):
            for b in range(len(child[a])):
                for c in range(len(child[a][b])):
                    if np.random.choice((True, False), p=[self.mutation_rate, 1 - self.mutation_rate]):
                        child[a][b][c] = self.getRandomWeight()
                    elif random.choice((True, False)):
                        child[a][b][c] = copy.deepcopy(parent2[a][b][c])

        # for keras model
        # for a in range(0, len(child)):
        #     a_layer = child[a]
        #     for b in range(0, len(a_layer)):
        #         b_layer = a_layer[b]
        #         if not isinstance(b_layer, np.ndarray):
        #             if np.random.choice((True, False), p=[self.mutation_rate, 1-self.mutation_rate]):
        #                 child[a][b] = self.getRandomWeight()
        #             elif random.choice((True, False)):
        #                     child[a][b] = copy.deepcopy(parent2[a][b])
        #             continue
        #         for c in range(0, len(b_layer)):
        #             c_layer = b_layer[c]
        #             if not isinstance(c_layer, np.ndarray):
        #                 if np.random.choice((True, False), p=[self.mutation_rate, 1-self.mutation_rate]):
        #                     child[a][b][c] = self.getRandomWeight()
        #                 elif random.choice((True, False)):
        #                     child[a][b][c] = copy.deepcopy(parent2[a][b][c])
        #                 continue

        return child

    def mutate(self, population):
        """
        Mutates the population randomly based on the mutation rate
        """
        for i in range(0, len(population)):
            toMutate = population[i]
            for a in range(0, len(toMutate)):
                a_layer = toMutate[a]
                for b in range(0, len(a_layer)):
                    b_layer = a_layer[b]
                    if not isinstance(b_layer, np.ndarray):
                        if np.random.choice((True, False), p=[self.mutation_rate, 1 - self.mutation_rate]):
                            toMutate[a][b] = self.getRandomWeight()
                        continue
                    for c in range(0, len(b_layer)):
                        c_layer = b_layer[c]
                        if not isinstance(c_layer, np.ndarray):
                            if np.random.choice((True, False), p=[self.mutation_rate, 1 - self.mutation_rate]):
                                toMutate[a][b][c] = self.getRandomWeight()
                            continue
        return population

    def getRandomWeight(self):
        """
        Gets a random weight for the model
        """
        return random.uniform(-self.random_weight_range, self.random_weight_range)

    def savePopulation(self, population):
        """
        Saves the entire population
        """
        if self.save_population:
            os.makedirs('./runs/run{}/population'.format(self.overallRun), exist_ok=True)
            for i in range(len(population)):
                self.model.set_weights(population[i])
                self.model.save('./runs/run{}/population/individual{}.h5'.format(self.overallRun, i))

    def saveBest(self, best):
        """
        Saves the best model for a generation
        """
        if self.save_best:
            os.makedirs('./runs/run{}/best'.format(self.overallRun), exist_ok=True)
            self.model.set_weights(best)
            self.model.save('./runs/run{}/best/generation{}.h5'.format(self.overallRun, self.generation))

    def average(self, scores):
        # 'scores' is a dict keyed by population index (0..population_size-1)
        total = 0
        for i in range(len(scores)):
            total += scores[i]

        return total / len(scores)

    def replay(self, model):
        """
        Displays a replay for the given model
        """
        self.gameCycle(model, -1)

    def loadPopulation(self, run):
        """
        Loads a population from a file for the specified run
        """
        # if os.path.exists('./runs/run{}/population'.format(run)):
        # initial_population = []
        # for i in range(0, self.population_size):
        #     temp_model = load_model('./runs/run{}/population/individual{}.h5'.format(run, i))
        #     initial_population.append(temp_model.get_weights())
        # return initial_population
        return []
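Since the Genetics class does all of its work in __init__, a run is started simply by constructing it. A hedged usage sketch (the parameter values below are illustrative, not taken from the original project):

# Hypothetical invocation; values chosen for illustration only.
genetics = Genetics(selection_rate=0.1,
                    mutation_rate=0.01,
                    population_size=100,
                    max_generations=50,
                    save_best=True,
                    save_graph=True)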
Example 5
import numpy
from flask import Flask, request, abort, jsonify
from net import NeuralNetwork

# Network parameters
input_nodes = 20
hidden_nodes = 100
output_nodes = 1
learning_rate = 0.2  # Make this .02 if you reduce epochs
epochs = 10
score_threshold = 0.09  # Anything above this is considered 'yes', customer is likely to subscribe

n = NeuralNetwork(input_nodes, hidden_nodes, output_nodes, learning_rate)

from numpy import genfromtxt
training_data_list = genfromtxt('./banking-train.csv',
                                delimiter=',',
                                skip_header=1,
                                dtype=str)

# Map well known strings to numbers
job_categories = [
    'admin.', 'blue-collar', 'entrepreneur', 'housemaid', 'management',
    'retired', 'self-employed', 'services', 'student', 'technician',
    'unemployed', 'unknown'
]
marital_categories = ['divorced', 'married', 'single', 'unknown']
education_categories = [
    'basic.4y', 'basic.6y', 'basic.9y', 'high.school', 'illiterate',
    'professional.course', 'university.degree', 'unknown'
]
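The snippet ends before the encoding step that the comment announces. As a rough sketch of the "map well known strings to numbers" idea (the helper name category_to_number and the scaling scheme are assumptions, not part of the original code):

def category_to_number(value, categories):
    # Map a known category string to a number in (0, 1]; strings not in the
    # list fall into the trailing 'unknown' bucket.
    try:
        index = categories.index(value)
    except ValueError:
        index = len(categories) - 1
    return (index + 1) / len(categories)

# e.g. turn two fields of one CSV row into network inputs
job_input = category_to_number('technician', job_categories)
marital_input = category_to_number('single', marital_categories)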
Example 6
#!/usr/bin/python3
from parse import Parser
from preprocess import Preprocess
from net import NeuralNetwork

if __name__ == '__main__':
    p = Parser()
#    p.readCSV("xAPI-Edu-Data.csv")
    p.splitTrainTest("xAPI-Edu-Data.csv")
#    pre = Preprocess()
#    pre.createArrs("trainData.txt","testData.txt")
    nn = NeuralNetwork()
#    nn.train_neural_network()
    nn.test_neural_network()
Example 7
# Toy example for predicting the number of ones in a given binary array
# e.g. [0,1,1,0,0,1,0] -> 3

import numpy as np
from matplotlib import pyplot as plt
from random import randint, seed

from net import NeuralNetwork
seed(3)

N = 20
nn = NeuralNetwork(N, 1)
n_iter = 10000
losses = []
for i in range(n_iter):
    y = np.array([randint(0, N)])
    x = np.concatenate([np.ones([y[0]]), np.zeros([N - y[0]])], axis=0)
    np.random.shuffle(x)
    nn.forward(x, y)
    nn.backprop()
    loss = pow(y - nn.output, 2)[0][0]
    losses.append(loss)
    print(i, loss, "actual: {} | predicted: {}".format(y[0], nn.output[0]))

y = np.array([5])
x = np.concatenate([np.ones([1, y[0]]), np.zeros([1, N - y[0]])], axis=1)
nn.forward(x, y)
print "Actual output: {} | Predicted output: {}".format(y, nn.output)

plt.plot(range(len(losses)), losses)
plt.show()
Example 8
def create_nn_array(sizes):
    # Create one network per entry in `sizes`; only the length of `sizes` is
    # used here, the layer dimensions are fixed.
    neural_nets = [
        NeuralNetwork(input_size=13, hidden_size=13, output_size=3)
        for _ in sizes
    ]
    return neural_nets
Example 9
import numpy as np
import matplotlib.pyplot as plt
from dataloader import parse_dataset
from genetic import EPOCHS
from net import NeuralNetwork

model = NeuralNetwork(input_size=13, hidden_size=5, output_size=3)
X_train, Y_train, X_test, Y_test = parse_dataset()
losses_hist = []
accuracy = []
for epoch in range(EPOCHS):
    model.train(X_train, Y_train)
    output = model.predict(X_test)
    loss = np.square(np.argmax(output, axis=1) - Y_test).mean()
    acc = len(np.where(np.argmax(output, axis=1) == Y_test)[0]) / len(Y_test)
    losses_hist.append(loss)
    accuracy.append(acc)

plt.plot(range(len(losses_hist)), losses_hist)
plt.title("Loss using all training data")
plt.xlabel("Epochs")
plt.ylabel("Loss")
plt.show()
Example 10
class Core:
    def __init__(self, file_path):
        self.__file_path = file_path
        self.__network = None
        self.__train_data = []
        self.__train_class = []
        self.__test_data = []
        self.__test_class = []
        self.__outs = []
        self.__gradients = []
        self.__bias = []
        self.__learning_rate = 0.023
        self.__network = NeuralNetwork(self.__learning_rate)

        for i in range(len(self.__network.return_arch())):
            self.__outs.append(np.zeros([self.__network.return_arch()[i]]))

        for i in range(1, len(self.__network.return_arch())):
            self.__gradients.append(np.zeros(self.__network.return_arch()[i]))

        for i in range(1, len(self.__network.return_arch())):
            self.__bias.append(
                np.random.randn(self.__network.return_arch()[i]) * 0.1)

        if self.__check_file():
            data_frame = pd.read_csv(self.__file_path, header=None)[:100]
            data_frame = data_frame.sample(frac=1).reset_index(drop=True)
            classes = data_frame[4]
            clss = data_frame[4][0]
            # First 80 shuffled rows are used for training (y), the rest for
            # testing (X); only columns 0-2 are taken as features
            y = data_frame.iloc[:80, 0:3].values
            X = data_frame.iloc[80:, 0:3].values

            for row, cls in zip(y, classes[:80]):
                self.__train_data.append(list(row))
                self.__train_class.append([int(cls == clss)])

            self.__data = self.__train_data
            self.__classes = self.__train_class
            self.__train_data = np.matrix(self.__train_data)
            self.__train_class = np.matrix(self.__train_class)

            for row, cls in zip(X, classes[80:]):
                self.__test_data.append(list(row))
                self.__test_class.append(int(cls == clss))

            # self.__network = NeuralNetwork(0.1)
            self.__start(250, data_frame[80:])
        else:
            print("TypeError; file must have .csv type")
            exit(1)

    # check type of file
    def __check_file(self):
        try:
            if self.__file_path.split('.')[1] == "csv":
                return 1
            else:
                return 0
        except IndexError:
            print(
                "Error: file name is incomplete or has no extension")
            exit(1)

    def __MSE(self, y, Y):
        return np.mean((y - Y)**2)

    def __start(self, epochs, test_frame):
        print("Train:")
        errors_ = []
        for e in range(epochs):
            self.__network.train(self.__train_data, self.__train_class,
                                 self.__outs, self.__gradients, self.__bias)
            train_loss = self.__MSE(
                self.__network.predict(np.array(self.__data)),
                np.array(self.__classes[:80]))
            errors_.append(train_loss)
            stdout.write("\rProgress: {}, Train loss: {}".format(
                str(100 * e / float(epochs))[:4],
                str(train_loss)[:5]))

        # error graph

        plt.plot(range(1, len(errors_) + 1), errors_, marker='o')
        plt.xlabel('Epochs')
        plt.ylabel('Train loss (MSE)')
        plt.show()

        print(self.__network.return_weigh())

        print('\n\n')
        print("Test data")
        print(test_frame)

        print('\n\nResult:')
        for input_stat, correct_predict in zip(self.__test_data,
                                               self.__test_class):
            print("For inputs: {} predict is: ({}) {}, expected: {}".format(
                str(np.array(input_stat)),
                str(self.__network.predict_once(np.array(input_stat))),
                str(self.__network.predict_once(np.array(input_stat)) > .5),
                str(correct_predict == 1)))

        print('\n\nPredictions:')
        while True:
            print('Enter a data point and I will predict its class:')
            arg1 = float(input("First arg: "))
            arg2 = float(input("Second arg: "))
            arg3 = float(input("Third arg: "))

            result = self.__network.predict_once([arg1, arg2, arg3])
            print("Result: {1}({0})\n\n".format(result, result > .5))
Example 11
            left = randint(0, SIZE)
            top = randint(0, SIZE)
            right = randint(0, SIZE)
            bottom = randint(0, SIZE)
            if (right - left >= MIN_RECT_SIZE
                    and bottom - top >= MIN_RECT_SIZE):
                rect_found = True
                for x in range(left, right):
                    for y in range(top, bottom):
                        canvas[y][x] += 1  # use canvas[y][x] = 1 for combined boxes
    return (canvas, n_rect)


canvas, y = generate_sample(1)
nn = NeuralNetwork(canvas.size, 1)
nn.set_learning_rate(0.01)
n_iter = 30000
losses = []
for i in range(n_iter):
    n_rect = randint(0, MAX_N_RECT)
    canvas, y = generate_sample(n_rect)
    x = np.concatenate(canvas)
    y = np.array([y])
    nn.forward(x, y)
    nn.backprop()
    loss = pow(y - nn.output, 2)[0][0]
    losses.append(loss)
    print "{}|\tloss: {} \tacutal/predicted:\t{} / {}".format(
        i, loss, y[0], nn.output[0][0])
Example 12
    def first_generation(self):
        '''Create the first generation of snakes.
        '''
        self.cpus = []
        for i in range(self.population_size):
            self.cpus.append(self.new_snake(NeuralNetwork(24, 8, 4)))