Example #1
def test_plot_fitness():
    """ Tests plot_fitness function """
    logplot.clear_logs('test')
    logplot.plot_fitness('test', [0, 1, 2])
    assert os.path.isfile(logplot.get_log_folder('test') + '/Fitness.png')
    try:
        shutil.rmtree(logplot.get_log_folder('test'))
    except FileNotFoundError:
        pass
    def write_table(self):
        """
        Writes table contents to a file
        """
        with open(logplot.get_log_folder(self.log_name) + '/hash_log.txt', "w") as f:
            for node in filter(lambda x: x is not None, self.buckets):
                while node is not None:
                    f.write("key: " + str(node.key) +
                            ", value: " + str(node.value) +
                            ", count: " + str(len(node.value)) + "\n")
                    node = node.next
    def load(self):
        """
        Loads hash table information.
        """
        with open(logplot.get_log_folder(self.log_name) + '/hash_log.txt', 'r') as f:
            lines = f.read().splitlines()

            for line in lines:
                # Each line has the form "key: <key>, value: [<v1>, ...], count: <n>"
                individual = line[5:].split(", value: ")
                key = ast.literal_eval(individual[0])
                individual = individual[1].split(", count: ")
                values = individual[0][1:-1].split(", ")  # Remove brackets and split multiples
                for value in values:
                    self.insert(key, float(value))
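
For reference, write_table and load above share a simple line format for hash_log.txt. The following standalone sketch (with made-up key and values) shows the round trip the two methods perform:

import ast

# Serialize one entry the way write_table does (illustrative key/values only)
key = ['fetch cube', 'place cube']
values = [0.5, 0.75]
line = "key: " + str(key) + ", value: " + str(values) + ", count: " + str(len(values))

# Parse it back the way load does
rest = line[5:].split(", value: ")            # drop the "key: " prefix
parsed_key = ast.literal_eval(rest[0])        # ['fetch cube', 'place cube']
value_part = rest[1].split(", count: ")[0]    # "[0.5, 0.75]"
parsed_values = [float(v) for v in value_part[1:-1].split(", ")]
assert parsed_key == key and parsed_values == values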
Example #4
def test_plot_learning_curves():
    """ Tests plot_learning_curves function """
    try:
        os.remove('behavior_tree_learning/tests/test.pdf')
    except FileNotFoundError:
        pass

    logplot.clear_logs('test')
    logplot.log_best_fitness('test', [1, 2, 3, 4, 5])
    logplot.log_n_episodes('test', [5, 10, 15, 20, 25])

    parameters = logplot.PlotParameters()
    parameters.path = 'behavior_tree_learning/tests/test.pdf'
    parameters.extrapolate_y = False
    parameters.plot_mean = False
    parameters.plot_std = False
    parameters.plot_ind = False
    parameters.save_fig = False
    parameters.x_max = 0
    parameters.plot_horizontal = True
    logplot.plot_learning_curves(['test'], parameters)
    assert not os.path.isfile('behavior_tree_learning/tests/test.pdf')

    parameters.extrapolate_y = True
    parameters.plot_mean = True
    parameters.plot_std = True
    parameters.plot_ind = True
    parameters.save_fig = True
    parameters.x_max = 100
    parameters.plot_horizontal = True
    logplot.plot_learning_curves(['test'], parameters)
    assert os.path.isfile('behavior_tree_learning/tests/test.pdf')
    os.remove('behavior_tree_learning/tests/test.pdf')

    parameters.x_max = 10
    parameters.plot_horizontal = False
    logplot.plot_learning_curves(['test'], parameters)
    assert os.path.isfile('behavior_tree_learning/tests/test.pdf')

    os.remove('behavior_tree_learning/tests/test.pdf')
    try:
        shutil.rmtree(logplot.get_log_folder('test'))
    except FileNotFoundError:
        pass
Example #5
def run(environment, gp_par, hotstart=False, baseline=None):
    # pylint: disable=too-many-statements, too-many-locals, too-many-branches
    """
    Runs the genetic programming algorithm
    """
    hash_table = HashTable(gp_par.hash_table_size, gp_par.log_name)

    if hotstart:
        best_fitness, n_episodes, last_generation, population = load_state(
            gp_par.log_name, hash_table)
    else:
        population = create_population(gp_par.n_population,
                                       gp_par.ind_start_length)
        logplot.clear_logs(gp_par.log_name)
        best_fitness = []
        n_episodes = []
        n_episodes.append(hash_table.n_values)
        last_generation = 0

        if baseline is not None:
            population[0] = baseline
            baseline_index = 0

    fitness = []
    for individual in population:
        fitness.append(
            get_fitness(individual, hash_table, environment, rerun=0))

    if not hotstart:
        best_fitness.append(max(fitness))

        if gp_par.verbose:
            print_population(population, fitness, last_generation)
            print("Generation: ", last_generation, " Best fitness: ",
                  best_fitness[-1])

        logplot.log_fitness(gp_par.log_name, fitness)
        logplot.log_population(gp_par.log_name, population)

    generation = gp_par.n_generations - 1  # In case the loop is skipped due to hotstart
    for generation in range(last_generation + 1, gp_par.n_generations):
        if gp_par.keep_baseline:
            if baseline is not None and baseline not in population:
                # Make sure we are always able to source from the baseline
                population.append(baseline)

        if generation > 1:
            fitness = []
            for index, individual in enumerate(population):
                fitness.append(
                    get_fitness(individual, hash_table, environment,
                                gp_par.rerun_fitness))
                if baseline is not None and individual == baseline:
                    baseline_index = index

        if gp_par.keep_baseline and gp_par.boost_baseline and baseline is not None:
            # Temporarily boost the baseline to the best fitness for parent selection
            baseline_fitness = fitness[baseline_index]
            fitness[baseline_index] = max(fitness)

        co_parents = crossover_parent_selection(population, fitness, gp_par)
        co_offspring = crossover(population, co_parents, gp_par)
        for offspring in co_offspring:
            fitness.append(
                get_fitness(offspring, hash_table, environment,
                            gp_par.rerun_fitness))

        if gp_par.boost_baseline and gp_par.boost_baseline_only_co and baseline is not None:
            # Restore original fitness for survivor selection
            fitness[baseline_index] = baseline_fitness

        mutation_parents = mutation_parent_selection(population, fitness,
                                                     co_parents, co_offspring,
                                                     gp_par)
        mutated_offspring = mutation(population + co_offspring,
                                     mutation_parents, gp_par)
        for offspring in mutated_offspring:
            fitness.append(
                get_fitness(offspring, hash_table, environment,
                            gp_par.rerun_fitness))

        if gp_par.boost_baseline and baseline is not None:
            # Restore original fitness for survivor selection
            fitness[baseline_index] = baseline_fitness

        population, fitness = survivor_selection(population, fitness,
                                                 co_offspring,
                                                 mutated_offspring, gp_par)

        best_fitness.append(max(fitness))
        n_episodes.append(hash_table.n_values)

        logplot.log_fitness(gp_par.log_name, fitness)
        logplot.log_population(gp_par.log_name, population)

        if gp_par.verbose:
            print("Generation: ", generation, "Fitness: ", fitness,
                  "Best fitness: ", best_fitness[generation])

        # Save a checkpoint every 25 generations; the last generation is saved later
        if (generation + 1) % 25 == 0 and generation < gp_par.n_generations - 1:
            save_state(gp_par, population, None, best_fitness, n_episodes,
                       baseline, generation, hash_table)

    print("\nFINAL POPULATION: ")
    print_population(population, fitness, generation)

    best_individual = selection(population, fitness, 1,
                                SelectionMethods.ELITISM)[0]

    save_state(gp_par, population, best_individual, best_fitness, n_episodes,
               baseline, generation, hash_table)

    if gp_par.plot:
        logplot.plot_fitness(gp_par.log_name, best_fitness, n_episodes)
    if gp_par.fig_best:
        environment.plot_individual(logplot.get_log_folder(gp_par.log_name),
                                    'best individual', best_individual)
    if gp_par.fig_last_gen:
        for i in range(gp_par.n_population):
            environment.plot_individual(
                logplot.get_log_folder(gp_par.log_name),
                'individual_' + str(i), population[i])

    return population, fitness, best_fitness, best_individual
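
As a rough orientation, run() above implements a standard generational loop: evaluate fitness, select crossover parents, create and evaluate offspring, mutate, then apply survivor selection. The self-contained toy sketch below reproduces that loop on a bit-string problem; it deliberately does not use the library's helpers (create_population, crossover, etc.), so all names and numbers in it are illustrative only.

import random

random.seed(0)

def fitness_fn(ind):
    # Toy objective: maximize the number of ones in a bit string
    return sum(ind)

def make_individual(length=10):
    return [random.randint(0, 1) for _ in range(length)]

population = [make_individual() for _ in range(20)]
best_fitness = []

for generation in range(30):
    fitness = [fitness_fn(ind) for ind in population]
    best_fitness.append(max(fitness))

    def tournament():
        # Binary tournament parent selection
        a, b = random.sample(range(len(population)), 2)
        return population[a] if fitness[a] >= fitness[b] else population[b]

    offspring = []
    for _ in range(len(population) // 2):
        parent1, parent2 = tournament(), tournament()
        cut = random.randrange(1, len(parent1))   # one-point crossover
        child = parent1[:cut] + parent2[cut:]
        if random.random() < 0.2:                 # point mutation
            index = random.randrange(len(child))
            child[index] = 1 - child[index]
        offspring.append(child)

    # Survivor selection: keep the best of parents + offspring
    combined = population + offspring
    ranked = sorted(combined, key=fitness_fn, reverse=True)
    population = ranked[:20]

print("Best fitness per generation:", best_fitness)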