Пример #1
0
    def fitness(self, nn_chromosome: BackPropModel, train_dataset,
                val_dataset):
        """Score a candidate network: train it, then return its validation accuracy.

        The chromosome IS a full network here; it is trained in place on
        train_dataset and evaluated on val_dataset.
        """
        print("train")
        nn_chromosome.train(train_dataset)
        print("test")
        score = nn_chromosome.test(val_dataset)
        print(score)
        return score
Пример #2
0
    def fitness(self, nn_chromosome, train_dataset):
        """Evaluate a (weights, biases) chromosome on the training data.

        A fresh network is built from self.nn's args, the chromosome's
        parameters are installed into it, and the loss/success are computed.
        Returns (chromosome, loss, success, fitness); the fitness value is
        the loss itself.
        """
        chromosome_weights, chromosome_biases = nn_chromosome

        candidate = BackPropModel(self.nn.args)
        candidate.weights = list(chromosome_weights)
        candidate.biases = list(chromosome_biases)

        loss, success = candidate.calculate_loss_and_success(train_dataset)

        # NOTE(review): fitness is the raw loss, so "fitter" means lower only
        # if the GA minimises this value — confirm against the selection code.
        return nn_chromosome, loss, success, loss
Пример #3
0
 def mutate(self, chromosome: BackPropModel):
     """Randomly mutate one hyper-parameter of the chromosome in place.

     With equal probability, either re-draw the hidden-layer sizes (and
     rebuild the layer list) or re-draw the activation function pair.
     Returns the mutated chromosome.
     """
     if random.randint(1, 2) == 1:
         print("mutate hidden layers")
         chromosome.args.hidden_layers_sizes = chromosome.args.choose_hidden_layers(
         )
         chromosome.layers = chromosome.args.create_layers_list()
     else:
         # randint(1, 2) only yields 1 or 2, so this branch is "== 2".
         print("mutate f")
         chromosome.args.f, chromosome.args.df = chromosome.args.choose_activation(
         )
     return chromosome
Пример #4
0
    def write_results_to_file(self, fittest_chromosome, test_set, filename):
        """Rebuild a network from the fittest (weights, biases) chromosome and
        write its results on test_set to the given file."""
        best_weights, best_biases = fittest_chromosome

        model = BackPropModel(self.nn.args)
        model.weights = list(best_weights)
        model.biases = list(best_biases)
        model.write_result_to_file(test_set, filename)
Пример #5
0
    def breed_parents(self, p1: BackPropModel, p2: BackPropModel):
        """Create a child network whose hyper-parameters mix those of p1 and p2.

        The (f, df) activation pair is taken uniformly from one parent, the
        hidden-layer sizes are crossed over via breed_layers, and the learning
        rate / epoch count are currently fixed constants.
        Returns a new, untrained BackPropModel.
        """
        # Fixed for now; an earlier version sampled these from the parents.
        lr = 0.01
        epochs = 1

        # Uniformly pick one parent's activation/derivative pair.
        # (random.choice is the idiomatic single-draw form of
        # random.sample(..., k=1)[0].)
        activation, d_activation = random.choice([(p1.args.f, p1.args.df),
                                                  (p2.args.f, p2.args.df)])
        hidden_layers = self.breed_layers(list(p1.args.hidden_layers_sizes),
                                          list(p2.args.hidden_layers_sizes))

        child_args = BackpropArgs(p1.args.input_size, p1.args.output_size, lr,
                                  hidden_layers, epochs, activation,
                                  d_activation)

        return BackPropModel(child_args)
Пример #6
0
 def init_population(self):
     """Build the initial GA population.

     Returns a list of self.population_size BackPropModel instances, each
     configured with freshly generated random network args.
     """
     return [
         BackPropModel(self.generate_network_args())
         for _ in range(self.population_size)
     ]
Пример #7
0
 def train_best(self, best: BackPropModel, dataset):
     """Train the fittest network for exactly one epoch, then restore its
     configured epoch count.

     The original code reset epochs to the literal 1 after training, which
     is a no-op "restore" whenever the network's own setting differed from
     1; save/restore preserves the caller's configuration instead.
     """
     original_epochs = best.args.epochs
     best.args.epochs = 1
     try:
         best.train(dataset)
     finally:
         # Restore even if training raises.
         best.args.epochs = original_epochs
Пример #8
0
    #     backProp = BackPropModel(backprop_args)
    #
    #
    #     backProp.train(train_data, val_data)
    #     print("Test Accuracy:", str(backProp.test(test_data)) + "%")
    #     print("Train Accuracy:", str(backProp.test(train_data)) + "%")

    # if part == 'b':
    print("start GA")
    learning_rate = 0.01
    hidden_layers_sizes = [128, 64]
    epochs = 1

    nn_args = BackpropArgs(input_size, output_size, learning_rate,
                           hidden_layers_sizes, epochs)
    NNModel = BackPropModel(nn_args)

    population_size = 100
    replication_rate = 0.1
    mutation_rate = 0.1
    elitism_rate = 2

    GA_args = GAArgs(population_size, replication_rate, mutation_rate,
                     elitism_rate, NNModel)
    print(GA_args.population_size, GA_args.replication_rate,
          GA_args.mutation_rate, GA_args.elitism_rate)

    GA = GAModel(GA_args)

    random.shuffle(train_data)