Example #1
    def start_cycle(self, one_by_one=False):
        """Start the cycle.

        Keyword arguments:
        one_by_one -- evaluate the genomes one by one if True (default False)
        """
        if one_by_one:
            # play each genome in a game alone.
            for generation in range(self.generations):
                # retrieve genome list and call evaluation function for each one.
                genome_list = NEAT.GetGenomeList(self.population)
                best_fitness = 0
                print("generation", generation + 1, ":")
                print("testing " + str(self.params.PopulationSize) +
                      " genomes : ")
                i = 0
                for genome in genome_list:
                    i += 1
                    print(i, end=" ")
                    net = NEAT.NeuralNetwork()
                    genome.BuildPhenotype(net)
                    fitness = self.evaluate(genome, generation + 1, i)
                    if best_fitness < fitness:
                        best_fitness = fitness
                    genome.SetFitness(fitness)
                # print best fitness and advance to the next generation
                print("best fitness : ", best_fitness)
                print("=======================================")
                self.population.Epoch()
        else:
            # play all of the population at the same time
            for generation in range(self.generations):
                # retrieve genome list and build the players list.
                genome_list = NEAT.GetGenomeList(self.population)
                players = list()
                for genome in genome_list:
                    net = NEAT.NeuralNetwork()
                    genome.BuildPhenotype(net)
                    players.append(Dino_player_neat(net))
                # start game and retrieve fitness list.
                the_game = Dino_NEAT(players, generation)
                fitness = the_game.on_execute()
                if fitness is None:
                    print("Training stopped.")
                    break
                # assign each genome to its corresponding fitness.
                best_fitness = 0
                for i in range(len(fitness)):
                    genome = genome_list[i]
                    if best_fitness < fitness[i]:
                        best_fitness = fitness[i]
                    genome.SetFitness(fitness[i])
                # print best fitness and advance to the next generation
                print("generation", generation, ":", best_fitness)
                self.population.Epoch()
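Example #1 relies on helpers that are not part of the snippet (self.evaluate, Dino_NEAT, Dino_player_neat). A minimal sketch of the per-genome evaluation the one_by_one branch assumes, mirroring the batch branch above (hypothetical, not taken from the source):

    def evaluate(self, genome, generation, index):
        # hypothetical sketch: build the phenotype, play a single-player game
        # with it and return the achieved score as the genome's fitness
        net = NEAT.NeuralNetwork()
        genome.BuildPhenotype(net)
        the_game = Dino_NEAT([Dino_player_neat(net)], generation)
        scores = the_game.on_execute()  # assumed to return one score per player
        return scores[0] if scores else 0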
Example #2
def evolve():
    g = NEAT.Genome(0, 3, 0, 1, False, NEAT.ActivationFunction.UNSIGNED_SIGMOID,
                    NEAT.ActivationFunction.UNSIGNED_SIGMOID, 0, params, 0)
    pop = NEAT.Population(g, params, True, 1.0, 1)
    pop.RNG.Seed(int(time.perf_counter() * 100))

    generations = 0
    for generation in range(1000):
        genome_list = NEAT.GetGenomeList(pop)
        fitness_list = []
        for genome in genome_list:
            fitness_list.append(evaluate(genome))
        NEAT.ZipFitness(genome_list, fitness_list)
        pop.Epoch()
        generations = generation
        best = max(fitness_list)
        bestG = pop.GetBestGenome()
        plot_nn(bestG)
        plt.pause(0.001)
        plt.ion()
        plt.show(block=False)
        print("Mejor fitness [",generation,"]: ",best)
        if best > 15.9:
            break

    return generations
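evaluate() and plot_nn() are not shown in Example #2. The stopping threshold (best > 15.9) matches the usual XOR fitness of (4 - total_error) ** 2, so a plausible sketch of the assumed evaluate is:

def evaluate(genome):
    # assumed XOR evaluation: the third input is the bias, fitness peaks at 16
    net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(net)
    error = 0.0
    for inputs, target in (((0, 0, 1), 0), ((0, 1, 1), 1),
                           ((1, 0, 1), 1), ((1, 1, 1), 0)):
        net.Flush()
        net.Input([float(x) for x in inputs])
        net.Activate()
        net.Activate()
        error += abs(net.Output()[0] - target)
    return (4.0 - error) ** 2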
Example #3
def getbest(i):
    g = NEAT.Genome(0, 3, 0, 1, False, NEAT.ActivationFunction.UNSIGNED_SIGMOID,
                    NEAT.ActivationFunction.UNSIGNED_SIGMOID, 0, params, 0)
    pop = NEAT.Population(g, params, True, 1.0, i)
    # pop.RNG.Seed(int(time.clock()*100))
    pop.RNG.Seed(1234)

    generations = 0
    for generation in range(max_generations):
        genome_list = NEAT.GetGenomeList(pop)
        fitness_list = EvaluateGenomeList_Serial(genome_list, evaluate, display=False)
        # fitness_list = EvaluateGenomeList_Parallel(genome_list, evaluate, display=False)
        NEAT.ZipFitness(genome_list, fitness_list)
        pop.Epoch()
        generations = generation
        best = max(fitness_list)
        if best > 15.0:
            break

    net = NEAT.NeuralNetwork()
    pop.GetBestGenome().BuildPhenotype(net)

    # img = NEAT.viz.Draw(net)
    # cv2.imshow("current best", img)
    # cv2.waitKey(1)
    
    return generations, net.NumHiddenNeurons(), net.NumConnections()
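A hypothetical driver for Example #3's getbest(): run it across several seeds and summarize how quickly each run solved the task.

if __name__ == '__main__':
    results = [getbest(run) for run in range(10)]
    avg_gens = sum(r[0] for r in results) / float(len(results))
    print('average generations to solve:', avg_gens)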
Example #4
def evaluate_obj_functions(obj_function, generation):
    """
    The function to evaluate the population of objective functions
    Arguments:
        obj_function:   The population of objective functions
        generation:     The current generation of evolution
    """
    obj_func_coeffs = []
    n_items_list = []
    # evaluate objective function genomes and collect novelty items
    obj_func_genomes = NEAT.GetGenomeList(obj_function.population)
    for genome in obj_func_genomes:
        n_item = evaluate_individ_obj_function(genome=genome,
                                               generation=generation)
        n_items_list.append(n_item)
        obj_func_coeffs.append(n_item.data)

    # evaluate collected novelty items and set genomes fitness scores
    max_fitness = 0
    for i, genome in enumerate(obj_func_genomes):
        fitness = obj_function.archive.evaluate_novelty_score(
            item=n_items_list[i], n_items_list=n_items_list)
        genome.SetFitness(fitness)
        max_fitness = max(max_fitness, fitness)

    return obj_func_coeffs, max_fitness
Example #5
def getbest(i):
    g = NEAT.Genome(0, substrate.GetMinCPPNInputs(), 0,
                    substrate.GetMinCPPNOutputs(), False,
                    NEAT.ActivationFunction.TANH, NEAT.ActivationFunction.TANH,
                    0, params)

    pop = NEAT.Population(g, params, True, 1.0, i)
    pop.RNG.Seed(i)

    for generation in range(2000):
        genome_list = NEAT.GetGenomeList(pop)
        fitnesses = NEAT.EvaluateGenomeList_Serial(genome_list,
                                                   evaluate,
                                                   display=False)
        [
            genome.SetFitness(fitness)
            for genome, fitness in zip(genome_list, fitnesses)
        ]

        best = max([x.GetLeader().GetFitness() for x in pop.Species])

        pop.Epoch()
        generations = generation
        if best > 15.0:
            break

    return generations
Example #6
    def single_generation(self):
        """
        Single generation of evaluation for a population on an environment. 
        
        :return: performance measure (i.e. fitness).
        """

        # Retrieve a list of all genomes in the population

        genome_list = mneat.GetGenomeList(self.alg.pop)

        # Main Population Evaluation Loop

        for current_genome in genome_list:

            # Evaluate the current genome

            fitness = self.evaluate_agent(current_genome)

            # Set the current genome's fitness

            current_genome.SetFitness(fitness)

        # Call a new Epoch - runs mutation and crossover, creating offspring

        self.alg.pop.Epoch()
Example #7
def getbest(i):
    g = NEAT.Genome(0, substrate.GetMinCPPNInputs(), 2,
                    substrate.GetMinCPPNOutputs(), False,
                    NEAT.ActivationFunction.TANH, NEAT.ActivationFunction.TANH,
                    0, params)

    pop = NEAT.Population(g, params, True, 1.0, i)
    pop.RNG.Seed(i)

    for generation in range(2000):
        genome_list = NEAT.GetGenomeList(pop)
        # if sys.platform == 'linux':
        #    fitnesses = EvaluateGenomeList_Parallel(genome_list, evaluate, display=False)
        # else:
        fitnesses = EvaluateGenomeList_Serial(genome_list,
                                              evaluate,
                                              display=False)
        [
            genome.SetFitness(fitness)
            for genome, fitness in zip(genome_list, fitnesses)
        ]

        print('Gen: %d Best: %3.5f' % (generation, max(fitnesses)))

        best = max(fitnesses)

        pop.Epoch()
        generations = generation

        if best > 15.0:
            break

    return generations
Example #8
    def test_multi_neat(self):
        params = NEAT.Parameters()
        params.PopulationSize = 100
        genome = NEAT.Genome(
            0,  # ID
            3,  # number of inputs. Note: always add one extra input, for bias
            0,  # number of hidden nodes
            2,  # number of outputs
            False,  # FS_NEAT; auto-determine an appropriate set of inputs for the evolved networks
            NEAT.ActivationFunction.UNSIGNED_SIGMOID,  # OutputActType
            NEAT.ActivationFunction.UNSIGNED_SIGMOID,  # HiddenActType
            0,  # SeedType
            params  # Parameters
        )
        seed = 42
        pop = NEAT.Population(
            genome,
            params,
            True,  # whether the population should be randomized
            1.0,  # how much the population should be randomized,
            seed)

        for generation in range(3):
            # retrieve a list of all genomes in the population
            genome_list = NEAT.GetGenomeList(pop)

            # apply the evaluation function to all genomes
            for genome in genome_list:
                fitness = self.evaluate(genome)
                genome.SetFitness(fitness)

            # advance to the next generation
            pop.Epoch()
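self.evaluate() is not included in Example #8. A minimal assumed version that simply exercises the phenotype (two real inputs plus the bias input, two outputs) and returns a toy fitness:

    def evaluate(self, genome):
        net = NEAT.NeuralNetwork()
        genome.BuildPhenotype(net)
        net.Flush()
        net.Input([1.0, 0.0, 1.0])  # two inputs plus the bias input
        net.Activate()
        outputs = net.Output()
        return float(outputs[0] + outputs[1])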
Example #9
def getbest(i):

    g = NEAT.Genome(0, 3, 0, 1, False,
                    NEAT.ActivationFunction.UNSIGNED_SIGMOID,
                    NEAT.ActivationFunction.UNSIGNED_SIGMOID, 0, params)
    pop = NEAT.Population(g, params, True, 1.0, i)
    pop.RNG.Seed(i)

    generations = 0
    for generation in range(1000):
        genome_list = NEAT.GetGenomeList(pop)
        fitness_list = NEAT.EvaluateGenomeList_Serial(genome_list,
                                                      evaluate,
                                                      display=False)
        NEAT.ZipFitness(genome_list, fitness_list)

        best = max([x.GetLeader().GetFitness() for x in pop.Species])

        pop.Epoch()

        generations = generation
        if best > 15.0:
            break

    return generations
Example #10
def getbest():

    g = NEAT.Genome(0, 3, 0, 1, False,
                    NEAT.ActivationFunction.UNSIGNED_SIGMOID,
                    NEAT.ActivationFunction.UNSIGNED_SIGMOID, 0, params)
    pop = NEAT.Population(g, params, True, 1.0)

    generations = 0
    for generation in range(1000):
        genome_list = NEAT.GetGenomeList(pop)
        fitness_list = NEAT.EvaluateGenomeList_Serial(genome_list,
                                                      evaluate,
                                                      display=False)
        NEAT.ZipFitness(genome_list, fitness_list)

        best = max([x.GetLeader().GetFitness() for x in pop.Species])
        #        print 'Best fitness:', best, 'Species:', len(pop.Species)

        # test
        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildPhenotype(net)
        img = np.zeros((250, 250, 3), dtype=np.uint8)
        img += 10
        NEAT.DrawPhenotype(img, (0, 0, 250, 250), net)

        cv2.imshow("nn_win", img)
        cv2.waitKey(1)

        pop.Epoch()
        #        print "Generation:", generation
        generations = generation
        if best > 15.5:
            break

    return generations
Example #11
def run_experiment(config_file, trial_id, n_generations, out_dir, view_results=False, save_results=True):
    """
    The function to run the experiment with the hyper-parameters
    defined in the provided configuration file.
    The winner genome is rendered as a graph, along with the
    important statistics of the neuroevolution run.
    Arguments:
        config_file:    The path to the file with experiment 
                        configuration
        trial_id:       The ID of current trial
        n_generations:  The number of evolutionary generations
        out_dir:        The directory to save intermediate results.
        view_results:   The flag to control if intermediate results should be displayed after each trial
        save_results:   The flag to control whether intermediate results should be saved after each trial.
    Returns:
        The tuple (solution_found, generation, complexity, best_genome_fitness): a flag indicating whether a
        solution was found, the generation in which it was found, the complexity of the best genome, and the
        fitness of the best genome.
    """
    g = NEAT.Genome(0, 4+1, 0, 1+1, False, NEAT.ActivationFunction.TANH, 
                NEAT.ActivationFunction.TANH, 0, params, 0)
    pop = NEAT.Population(g, params, True, 1.0, trial_id)

    # set random seed
    seed = int(time.time())
    pop.RNG.Seed(seed)

    generations = 0
    solved = False
    best_trial_fitness = 0
    best_trial_complexity = 0
    for generation in range(n_generations):
        genome_list = NEAT.GetGenomeList(pop)
        fitness_list = EvaluateGenomeList_Serial(genome_list, evaluate, display=view_results)
        NEAT.ZipFitness(genome_list, fitness_list)
        generations = generation
        best = max(genome_list, key=get_fitness)
        best_fitness = best.GetFitness()
        complexity = best.NumNeurons() + best.NumLinks()
        solved = best_fitness >= cart.MAX_FITNESS  # Changed to correspond to the limit used with other tested libraries
        if solved:
            best_trial_fitness = best_fitness
            best_trial_complexity = complexity
            print("Trial: %2d\tgeneration: %d\tfitness: %f\tcomplexity: %d\tseed: %d" % 
                    (trial_id, generations, best_trial_fitness, complexity, seed))
            break
        # check if best fitness in this generation is better than current maximum
        if best_fitness > best_trial_fitness:
            best_trial_complexity = complexity
            best_trial_fitness = best_fitness

        # move to the next epoch
        pop.Epoch()
            
    if not solved:
        print("Trial: %2d\tFAILED\t\tfitness: %f\tcomplexity: %d\tseed: %d" % 
                (trial_id, best_trial_fitness, best_trial_complexity, seed))

    return solved, generations, best_trial_complexity, best_trial_fitness
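get_fitness(), used as the max() key above, is not part of the snippet; a minimal assumed helper:

def get_fitness(genome):
    return genome.GetFitness()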
Example #12
    def run_neat():
        generation = 0
        while True:
            generation += 1
            for genome in NEAT.GetGenomeList(population):
                fitness = train(genome, generation=generation)
                genome.SetFitness(fitness)

            if validate is not None:
                population.Epoch()
                for genome in NEAT.GetGenomeList(population):
                    fitness = train(genome, generation=generation)
                    genome.SetFitness(fitness)
                current_best = pickle.dumps(population.GetBestGenome())
            else:
                current_best = pickle.dumps(population.GetBestGenome())
                population.Epoch()

            yield generation, current_best
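A hypothetical way to drive the run_neat() generator from Example #12: iterate it, unpickle the stored best genome, and stop on an external criterion.

for gen, best_blob in run_neat():
    best_genome = pickle.loads(best_blob)  # the generator yields a pickled genome
    print('finished generation', gen)
    if gen >= 100:  # stop on an external criterion
        break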
Example #13
 def sparseness(genome):
     distances = []
     for g in NEAT.GetGenomeList(pop):
         d = genome.behavior.distance_to(g.behavior)
         distances.append(d)
     # get the distances from the archive as well
     for ab in archive:
         distances.append(genome.behavior.distance_to(ab))
     distances = sorted(distances)
     sp = np.mean(distances[1:ns_K + 1])
     return sp
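A sketch (assumed, not from the source) of how sparseness() is typically used in novelty search: the sparseness score becomes each genome's fitness, and sufficiently novel behaviors are added to the archive.

def assign_novelty(ns_threshold):
    for g in NEAT.GetGenomeList(pop):
        sp = sparseness(g)
        g.SetFitness(sp)  # the novelty score replaces the objective fitness
        if sp > ns_threshold:
            archive.append(g.behavior)  # the archive grows with sufficiently novel points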
Example #14
def evolve():

    global generations
    global global_best
    global rrse_list
    global mae_list
    global rows
    global cols

    # print("LOO Validation:")
    g = NEAT.Genome(0, (cols - 1), (3), 1, False,
                    NEAT.ActivationFunction.LINEAR,
                    NEAT.ActivationFunction.LINEAR, 1, params, 1)
    for test_idx in range(rows):
        pop = NEAT.Population(g, params, True, 1.0, 0)
        pop.RNG.Seed(int(time.perf_counter() * 100))
        generations = 0
        global_best = -99999999
        no_improvement = 0
        # Run for a maximum of N generations
        while no_improvement < 7 and generations < 100:  #TODO: make max gens into variable and set via command line
            # Reset the population if this path does not seem promising
            if (generations > 7 and global_best < -150):
                pop = NEAT.Population(g, params, True, 1.0, 0)
                pop.RNG.Seed(int(time.perf_counter() * 100))
                generations = 0
                global_best = -99999999
                no_improvement = 0

            genome_list = NEAT.GetGenomeList(pop)
            fitness_list = []
            for genome in genome_list:
                fitness_list.append(evaluate(genome, test_idx))
            NEAT.ZipFitness(genome_list, fitness_list)
            pop.Epoch()
            generations += 1
            best = max(fitness_list)
            #print("[ROW:", test_idx, "] ", -global_best, " (", no_improvement, " g. of no improvement)")
            if best > global_best:
                no_improvement = 0
                global_best = best
            else:
                no_improvement += 1

        #print("LOO test error (RRSE):")
        #print(rrse_list[test_idx])
        #print("LOO test error (MAE):")
        #print(mae_list[test_idx])

    print(rrse_list)
    print(mae_list)
    avg_rrse = np.mean(rrse_list)
    avg_mae = np.mean(mae_list)
    return [avg_rrse, avg_mae]
Example #15
def getbest(run):
    g = NEAT.Genome(0, substrate.GetMinCPPNInputs(), 0,
                    substrate.GetMinCPPNOutputs(), False,
                    NEAT.ActivationFunction.TANH, NEAT.ActivationFunction.TANH,
                    0, params)

    pop = NEAT.Population(g, params, True, 1.0, run)
    for generation in range(1000):
        # Evaluate genomes
        genome_list = NEAT.GetGenomeList(pop)

        fitnesses = EvaluateGenomeList_Serial(genome_list,
                                              evaluate_xor,
                                              display=False)
        [
            genome.SetFitness(fitness)
            for genome, fitness in zip(genome_list, fitnesses)
        ]

        print('Gen: %d Best: %3.5f' % (generation, max(fitnesses)))

        # Print best fitness
        # print("---------------------------")
        # print("Generation: ", generation)
        # print("max ", max([x.GetLeader().GetFitness() for x in pop.Species]))

        # Visualize best network's Genome

        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildPhenotype(net)
        img = np.zeros((500, 500, 3), dtype=np.uint8)
        img += 10
        NEAT.DrawPhenotype(img, (0, 0, 500, 500), net)
        cv2.imshow("CPPN", img)
        # Visualize best network's Phenotype
        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildESHyperNEATPhenotype(
            net, substrate, params)
        img = np.zeros((500, 500, 3), dtype=np.uint8)
        img += 10

        NEAT.DrawPhenotype(img, (0, 0, 500, 500), net, substrate=True)
        cv2.imshow("NN", img)
        cv2.waitKey(1)

        if max(fitnesses) > 15.0:
            break

        # Epoch
        generations = generation
        pop.Epoch()

    return generations
Example #16
def optimize_neat(config, n_inputs, n_hidden, n_outputs, out_dir):
    print('Starting Optimization')
    params = NEAT.Parameters()
    params.PopulationSize = 60
    params.OldAgeTreshold = 10
    params.SpeciesMaxStagnation = 20
    params.AllowLoops = False

    for i in range(config['n_morphogens']):
        addTrait(params, 'K%i' % i, (.03, .08))
        addTrait(params, 'F%i' % i, (.01, .06))
        addTrait(params, 'diffU%i' % i, (.005, .02))
        addTrait(params, 'diffV%i' % i, (.0025, .01))

    ######################## Create NEAT objects ###############################
    fs_neat = False
    seed_type = 0
    out_type = NEAT.ActivationFunction.UNSIGNED_SIGMOID
    hidden_type = NEAT.ActivationFunction.UNSIGNED_SIGMOID
    genome_prototype = NEAT.Genome(0, n_inputs, n_hidden, n_outputs, fs_neat, \
                                   out_type, hidden_type, seed_type, params, 0)
    rand_seed = int(time.time())
    pop = NEAT.Population(genome_prototype, params, True, 1.0, rand_seed)

    ######################## Main evolution loop ###############################
    top_fitness = 0  # Fitness function is defined in [0, 1]
    top_grid = None

    for generation in range(config['generations']):
        print('Starting generation', generation)
        genomes = NEAT.GetGenomeList(pop)
        fitness_list = [simulate_genome(g, config)[0] for g in genomes]
        NEAT.ZipFitness(genomes, fitness_list)
        max_fitness = max(fitness_list)

        print('Generation complete')
        print('Max fitness', max_fitness)
        print('Mean fitness', np.mean(fitness_list))

        if max_fitness > top_fitness:
            print('New best fitness')
            best_genome = genomes[fitness_list.index(max_fitness)]
            _, best_grid = simulate_genome(best_genome, config)

            top_fitness = max_fitness
            top_grid = best_grid

            np.save(out_dir + '/grid_%i' % generation, best_grid)
            best_genome.Save(out_dir + '/genome_%i' % generation)

        pop.Epoch()
        print()
Example #17
def simGeneration():
	genomeList = NEAT.GetGenomeList(pop)

	#make graph
	graph = mp.Map(20, 20)

	for g in genomeList:
		net = NEAT.NeuralNetwork()
		g.BuildPhenotype(net)
		fitness = evaluate(net, graph)
		g.SetFitness(fitness)

	pop.Epoch()
Example #18
def evaluate_solutions(robot, obj_func_coeffs, generation):
    best_robot_genome = None
    solution_found = False
    distances = []
    n_items_list = []
    # evaluate robot genomes against maze simulation
    robot_genomes = NEAT.GetGenomeList(robot.population)
    for genome in robot_genomes:
        found, distance, n_item = evaluate_individual_solution(
            genome=genome, generation=generation, robot=robot)
        # store returned values
        distances.append(distance)
        n_items_list.append(n_item)

        if found:
            best_robot_genome = genome
            solution_found = True

    # evaluate novelty scores of robot genomes and calculate fitness
    max_fitness = 0
    best_coeffs = None
    best_distance = 1000
    best_novelty = 0
    for i, n_item in enumerate(n_items_list):
        novelty = robot.archive.evaluate_novelty_score(
            item=n_item, n_items_list=n_items_list)
        # The sanity check
        assert robot_genomes[i].GetID() == n_item.genomeId

        # calculate fitness
        fitness, coeffs = evaluate_solution_fitness(distances[i], novelty,
                                                    obj_func_coeffs)
        robot_genomes[i].SetFitness(fitness)

        if not solution_found:
            # find the best genome in population
            if max_fitness < fitness:
                max_fitness = fitness
                best_robot_genome = robot_genomes[i]
                best_coeffs = coeffs
                best_distance = distances[i]
                best_novelty = novelty
        elif best_robot_genome.GetID() == n_item.genomeId:
            # store fitness of winner solution
            max_fitness = fitness
            best_coeffs = coeffs
            best_distance = distances[i]
            best_novelty = novelty

    return best_robot_genome, solution_found, max_fitness, distances, best_coeffs, best_distance, best_novelty
Example #19
def getbest(run):
    g = NEAT.Genome(0, 7, 1, True, NEAT.ActivationFunction.SIGNED_SIGMOID,
                    NEAT.ActivationFunction.SIGNED_SIGMOID, params)

    pop = NEAT.Population(g, params, True, 1.0, run)
    for generation in range(1000):
        #Evaluate genomes
        genome_list = NEAT.GetGenomeList(pop)

        fitnesses = NEAT.EvaluateGenomeList_Serial(genome_list,
                                                   evaluate_xor,
                                                   display=False)
        [
            genome.SetFitness(fitness)
            for genome, fitness in zip(genome_list, fitnesses)
        ]

        # Print best fitness
        #print("---------------------------")
        #print("Generation: ", generation)
        #print("max ", max([x.GetLeader().GetFitness() for x in pop.Species]))

        # Visualize best network's Genome
        '''
        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildPhenotype(net)
        img = np.zeros((500, 500, 3), dtype=np.uint8)
        img += 10
        NEAT.DrawPhenotype(img, (0, 0, 500, 500), net )
        cv2.imshow("CPPN", img)
        # Visualize best network's Phenotype
        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().Build_ES_Phenotype(net, substrate, params)
        img = np.zeros((500, 500, 3), dtype=np.uint8)
        img += 10

        Utilities.DrawPhenotype(img, (0, 0, 500, 500), net, substrate=True )
        cv2.imshow("NN", img)
        cv2.waitKey(1)
        '''
        if max([x.GetLeader().GetFitness() for x in pop.Species]) > 15.0:
            break

        # Epoch
        generations = generation
        pop.Epoch()

    return generations
Example #20
def getbest():
    g = NEAT.Genome(0, substrate.GetMinCPPNInputs(), 0,
                    substrate.GetMinCPPNOutputs(), False,
                    NEAT.ActivationFunction.SIGNED_GAUSS,
                    NEAT.ActivationFunction.SIGNED_GAUSS, 0, params)

    pop = NEAT.Population(g, params, True, 1.0)

    for generation in range(1000):
        genome_list = NEAT.GetGenomeList(pop)
        #    fitnesses = NEAT.EvaluateGenomeList_Parallel(genome_list, evaluate)
        fitnesses = NEAT.EvaluateGenomeList_Serial(genome_list,
                                                   evaluate,
                                                   display=False)
        [
            genome.SetFitness(fitness)
            for genome, fitness in zip(genome_list, fitnesses)
        ]

        best = max([x.GetLeader().GetFitness() for x in pop.Species])
        #        print 'Best fitness:', best

        # test
        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildPhenotype(net)
        img = np.zeros((250, 250, 3), dtype=np.uint8)
        img += 10
        NEAT.DrawPhenotype(img, (0, 0, 250, 250), net)
        cv2.imshow("CPPN", img)

        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildHyperNEATPhenotype(net, substrate)
        img = np.zeros((250, 250, 3), dtype=np.uint8)
        img += 10
        NEAT.DrawPhenotype(img, (0, 0, 250, 250), net, substrate=True)
        cv2.imshow("NN", img)

        cv2.waitKey(1)

        pop.Epoch()
        #        print "Generation:", generation
        generations = generation
        if best > 15.5:
            break

    return generations
Example #21
def getbest(i):
    g = NEAT.Genome(0, 3, 0, 1, False, NEAT.ActivationFunction.UNSIGNED_SIGMOID,
                    NEAT.ActivationFunction.UNSIGNED_SIGMOID, 0, params)
    pop = NEAT.Population(g, params, True, 1.0, i)
    pop.RNG.Seed(int(time.perf_counter() * 100))

    generations = 0
    for generation in range(1000):
        genome_list = NEAT.GetGenomeList(pop)
        fitness_list = EvaluateGenomeList_Serial(genome_list, evaluate, display=False)
        NEAT.ZipFitness(genome_list, fitness_list)
        pop.Epoch()
        generations = generation
        best = max(fitness_list)
        if best > 15.0:
            break

    return generations
Example #22
def getbest():
    g = NEAT.Genome(0, 7, 1, False, NEAT.ActivationFunction.SIGNED_SIGMOID,
                    NEAT.ActivationFunction.SIGNED_SIGMOID, params)

    pop = NEAT.Population(g, params, True, 1.0)

    for generation in range(2000):

        genome_list = NEAT.GetGenomeList(pop)
        #    fitnesses = NEAT.EvaluateGenomeList_Parallel(genome_list, evaluate)
        fitnesses = NEAT.EvaluateGenomeList_Serial(genome_list,
                                                   evaluate_xor,
                                                   display=True)
        [
            genome.SetFitness(fitness)
            for genome, fitness in zip(genome_list, fitnesses)
        ]

        best = max([x.GetLeader().GetFitness() for x in pop.Species])

        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildPhenotype(net)
        img = np.zeros((500, 500, 3), dtype=np.uint8)
        img += 10
        NEAT.DrawPhenotype(img, (0, 0, 500, 500), net)
        cv2.imshow("CPPN", img)

        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().Build_ES_Phenotype(net, substrate, params)
        img = np.zeros((500, 500, 3), dtype=np.uint8)
        img += 10

        utilities.DrawPhenotype(img, (0, 0, 500, 500), net, substrate=True)
        cv2.imshow("NN", img)
        cv2.waitKey(1)

        generations = generation

        if best > 15.0:
            break

        pop.Epoch()

    return generations
Example #23
def objective_driven(seed):
    i = seed
    g = NEAT.Genome(0, 6, 0, 4, False, NEAT.ActivationFunction.SIGNED_SIGMOID,
                    NEAT.ActivationFunction.SIGNED_SIGMOID, 0, params)
    pop = NEAT.Population(g, params, True, 1.0, i)
    #pop.RNG.Seed(i)

    generations = 0
    for generation in range(250):
        genome_list = NEAT.GetGenomeList(pop)
        fitness_list = NEAT.EvaluateGenomeList_Serial(genome_list,
                                                      evaluate,
                                                      display=False)
        fitness_list = [k[0] for k in fitness_list]
        NEAT.ZipFitness(genome_list, fitness_list)

        best_fits = [x.GetLeader().GetFitness() for x in pop.Species]
        best = max(best_fits)
        idx = best_fits.index(best)
        print(best, pop.Species[idx].GetLeader().GetFitness())
        imgs, res = evaluate(pop.Species[idx].GetLeader(),
                             debug=True,
                             save="gen%d.ply" % generation)

        plt.ion()
        plt.clf()
        subfig = 1
        t_imgs = len(imgs)
        for img in imgs:
            plt.subplot(t_imgs, 1, subfig)
            plt.title("Confidence: %0.2f%%" %
                      (res[subfig - 1, target_class] * 100.0))
            plt.imshow(img)
            subfig += 1
        plt.draw()
        plt.pause(0.1)
        plt.savefig("out%d.png" % generation)
        pop.Epoch()

        generations = generation

    return generations
Example #24
def main():
    # for as many as maxGeneration
    for generation in range(50):

        # retrieve a list of all genomes in the population
        genome_list = NEAT.GetGenomeList(pop)

        # apply the evaluation function to all genomes
        sum = 0
        for genome in genome_list:
            fitness = evaluate(genome)
            genome.SetFitness(fitness)
            sum += fitness
        avg = sum / float(len(genome_list))
        print(avg)
        # at this point we may output some information regarding the progress of evolution, best fitness, etc.
        # it's also the place to put any code that tracks the progress and saves the best genome or the entire
        # population. We skip all of this in the tutorial.

        # advance to the next generation
        pop.Epoch()
Example #25
def getbest(i):
    g = NEAT.Genome(0, substrate.GetMinCPPNInputs(), 0,
                    substrate.GetMinCPPNOutputs(), False,
                    NEAT.ActivationFunction.TANH, NEAT.ActivationFunction.TANH,
                    0, params, 0)

    pop = NEAT.Population(g, params, True, 1.0, i)
    pop.RNG.Seed(i)

    for generation in range(max_generations):
        genome_list = NEAT.GetGenomeList(pop)
        # if sys.platform == 'linux':
        #    fitnesses = EvaluateGenomeList_Parallel(genome_list, evaluate, display=False)
        # else:
        fitnesses = EvaluateGenomeList_Serial(genome_list,
                                              evaluate,
                                              display=False)
        [
            genome.SetFitness(fitness)
            for genome, fitness in zip(genome_list, fitnesses)
        ]

        net = NEAT.NeuralNetwork()
        pop.GetBestGenome().BuildPhenotype(net)

        complexity = "complexity ({}, {})".format(net.NumHiddenNeurons(),
                                                  net.NumConnections())
        print('Gen: %d/%d Best: %3.5f. %s' %
              (generation, max_generations - 1, max(fitnesses), complexity))

        best = max(fitnesses)

        pop.Epoch()
        generations = generation

        if best > 15.0:
            break

    return generations
Example #26
def evolve_neat(params, generations, out_dir, run_id, pool):
    pop = create_initial_population(params)
    max_ever = None
    t = time.time()

    for generation in range(generations):
        print(run_id, 'Starting generation', generation)
        genomes = NEAT.GetGenomeList(pop)

        if pool:
            data = [(g, g.GetGenomeTraits(), params) for g in genomes]
            fitnesses = pool.starmap(evaluate, data)
        else:
            fitnesses = [
                evaluate(g, g.GetGenomeTraits(), params) for g in genomes
            ]

        NEAT.ZipFitness(genomes, fitnesses)

        maxf, meanf = max(fitnesses), sum(fitnesses) / float(len(fitnesses))
        runtime = time.time() - t
        t = time.time()

        print()
        print('Generation %i ran in %f, %f per coral' % \
                                    (generation, runtime, runtime/len(genomes)))
        print('Max fitness:', maxf, 'Mean fitness:', meanf)

        if max_ever is None or maxf > max_ever:
            best = pop.GetBestGenome()
            max_ever = maxf
            print('New best fitness.', best.NumNeurons(), best.NumLinks())
            coral = simulate_and_save(best, params, out_dir, generation, maxf,
                                      meanf)[0]

        pop.Epoch()
        print('#' * 80)

    print('Run Complete.')
Example #27
def evolve():
    g = NEAT.Genome(0, 2, 0, 1, False, NEAT.ActivationFunction.LINEAR,
                    NEAT.ActivationFunction.LINEAR, 0, params, 0)
    pop = NEAT.Population(g, params, True, 1.0, 1)
    pop.RNG.Seed(int(time.perf_counter() * 100))

    generations = 0
    for generation in range(50):
        genome_list = NEAT.GetGenomeList(pop)
        fitness_list = []
        for genome in genome_list:
            fitness_list.append(evaluate(genome))
        NEAT.ZipFitness(genome_list, fitness_list)
        pop.Epoch()
        generations = generation
        best = -max(fitness_list)
        bestG = pop.GetBestGenome()
        
        plot_nn(bestG)
        plt.pause(0.01)
        plt.ion()
        plt.show(block=False)
        
        print("Mejor fitness [",generation,"]: ",best)
        if best < 0.01:
            break

    testNet = NEAT.NeuralNetwork()
    bestG.BuildPhenotype(testNet)
    for i in range(10):
        testNet.Flush()
        testNet.Input(np.array([float(100+2*i), 1]))
        for _ in range(2):
            testNet.Activate()
        o = testNet.Output()
        print(100+2*i,"/ 2 = ",o[0]) 
        
    return generations
Example #28
def run_experiment(params, trial_id, n_generations, out_dir=None, view_results=False, save_results=True):
    g = NEAT.Genome(0, 3, 0, 1, False, NEAT.ActivationFunction.UNSIGNED_SIGMOID,
                    NEAT.ActivationFunction.UNSIGNED_SIGMOID, 0, params, 0)
    pop = NEAT.Population(g, params, True, 1.0, trial_id)

    # set random seed
    seed = int(time.time())
    pop.RNG.Seed(seed)

    generations = 0
    solved = False
    max_fitness = 0
    complexity = 0
    for generation in range(n_generations):
        genome_list = NEAT.GetGenomeList(pop)
        fitness_list = EvaluateGenomeList_Serial(genome_list, evaluate, display=view_results)
        NEAT.ZipFitness(genome_list, fitness_list)
        generations = generation
        best = max(genome_list, key=get_fitness)
        best_fitness = best.GetFitness()
        complexity = best.NumNeurons() + best.NumLinks()
        solved = best_fitness > 15.5  # Changed to correspond to the limit used with other tested libraries
        if solved:
            max_fitness = best_fitness
            print("Trial: %2d\tgeneration: %d\tfitness: %f\tcomplexity: %d\tseed: %d" % (trial_id, generations, max_fitness, complexity, seed))
            break
        # check if best fitness in this generation is better than current maximum
        max_fitness = max(best_fitness, max_fitness)

        # move to the next epoch
        pop.Epoch()
            
    if not solved:
        print("Trial: %2d\tFAILED\t\tfitness: %f\tcomplexity: %d\tseed: %d" % (trial_id, max_fitness, complexity, seed))

    return solved, generations, complexity, max_fitness
Example #29
def evolutionary_run(gens, pop_size, output_path, run_num, numofjoints):
    """ Conduct an evolutionary run using the snake and muscle model. 
    """
    params = NEAT.Parameters()
    params.CompatTreshold = 5.0
    params.CompatTresholdModifier = 0.3
    params.YoungAgeTreshold = 15
    params.SpeciesMaxStagnation = 1000
    params.OldAgeTreshold = 35
    params.MinSpecies = 1
    params.MaxSpecies = 25
    params.RouletteWheelSelection = False
    params.RecurrentProb = 0.25
    params.OverallMutationRate = 0.33
    params.MutateWeightsProb = 0.90
    params.WeightMutationMaxPower = 1.0
    params.WeightReplacementMaxPower = 5.0
    params.MutateWeightsSevereProb = 0.5
    params.WeightMutationRate = 0.75
    params.MaxWeight = 20
    params.MutateAddNeuronProb = 0.4
    params.MutateAddLinkProb = 0.4
    params.MutateRemLinkProb = 0.05
    params.CrossoverRate = 0.4

    assert pop_size >= 0, "wrong population size argument! pop_size: %d" % pop_size
    params.PopulationSize = pop_size

    # worm has only one dof (turning around y-axis) per joint
    num_outputs = numofjoints
    # the inputs for the ANN are the current joint positions plus the amplitude of a sine wave and a bias
    num_inputs = numofjoints + 1 + 1

    # Initialize the population
    # Genome(ID, NumInputs, NumHidden, NumOutputs, FS_NEAT, output activation function, hidden activation function, seed type, params)
    genome = NEAT.Genome(0, num_inputs, 0, num_outputs, False,
                         NEAT.ActivationFunction.SIGNED_SIGMOID,
                         NEAT.ActivationFunction.SIGNED_SIGMOID, 0, params)
    # Population(genome, params, randomize weights?, randomization range)
    population = NEAT.Population(genome, params, True, 1.0)
    genome_list = NEAT.GetGenomeList(population)

    morph_pop = MorphGenomes(pop_size)
    for ind in genome_list:
        morph_pop.addIndividual(ind.GetID())
    morph_genomes = morph_pop.getGenomes()

    # ANN genome and morphology genome are zipped together
    # Zip the two genome components together for use in the parallel call.
    zip_args = [(ind, morph_genomes[ind.GetID()]) for ind in genome_list]

    mnlog.write_population_statistics_headers(output_path + str(run_num) +
                                              "_fitnesses.dat")

    # Setup multiprocessing
    cores = mpc.cpu_count()
    #cores = 1
    pool = mpc.Pool(initializer=initProcess,
                    initargs=(
                        GlobalVarWorkaround.args,
                        GlobalVarWorkaround.man,
                        GlobalVarWorkaround.worm,
                    ),
                    processes=cores)

    assert gens >= 0, "wrong number of generations as argument! gens: %d" % gens

    for gen in range(gens):
        print(gen)
        #fitnesses = map(evaluate_individual,zip_args) # serial execution
        fitnesses = pool.map(evaluate_individual, zip_args)

        replace_nanfitnesses(fitnesses)

        for g, f in zip(genome_list, fitnesses):
            g.SetFitness(f)

        print("Generation " + str(gen) + "\t: " + str(max(fitnesses)))

        # Write the best performing individual to a file.
        mnlog.write_best_individual(
            output_path + "best_individuals/Evo_NEAT_run_" + str(run_num) +
            "_best_gen_" + str(gen) + ".dat",
            genome_list[fitnesses.index(max(fitnesses))])
        morph_pop.logGenome(
            genome_list[fitnesses.index(max(fitnesses))].GetID(),
            "Evo_NEAT_run_" + str(run_num) + "_best_gen_" + str(gen),
            output_path)

        # Write information about the best individual we wrote.
        with open(
                output_path + "/" + str(run_num) +
                "_best_individuals_logging.dat", "a") as f:
            f.write("Generation: "+str(gen)+" Individual is: "+str(genome_list[fitnesses.index(max(fitnesses))].GetID())+\
                " Fitness is: "+str(max(fitnesses))+"\n")

        # Log the progress of the entire population.
        mnlog.write_population_statistics(
            output_path + str(run_num) + "_fitnesses.dat", genome_list,
            fitnesses, gen)

        # Log the final population for later evaluation.
        if gen == gens - 1:
            population.Save(output_path + "run_" + str(run_num) +
                            "_population_generation_" + str(gen) + ".dat")

        # Create the next generation
        population.Epoch()
        genome_list = NEAT.GetGenomeList(population)
        morph_pop.NextGen()
        zip_args = []
        for ind in genome_list:
            # PID .. parent ID
            pid1 = ind.GetPID1()

            # if pid2 is negative it means that no crossover happened!
            pid2 = ind.GetPID2()
            gid = ind.GetID()

            # Handle Crossover
            morph_pop.Crossover(gid, pid1, pid2)

        # Handle Mutation
        morph_pop.MutatePopulation()

        # Zip the arguments for calling the evolution function.
        morph_genomes = morph_pop.getGenomes()
        zip_args = [(ind, morph_genomes[ind.GetID()]) for ind in genome_list]
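replace_nanfitnesses() from Example #29 is not shown; a minimal assumed implementation that swaps NaN scores from failed simulations for a very poor fitness, in place:

import math

def replace_nanfitnesses(fitnesses):
    for i, f in enumerate(fitnesses):
        if math.isnan(f):
            fitnesses[i] = 0.0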
Example #30
        action = np.argmax(out)
        observation, reward, done, info = env.step(action)
        if done: break

        f += reward

    avg_reward += f
    return avg_reward


try:

    for generation in range(100):

        for i_episode, genome in enumerate(NEAT.GetGenomeList(pop)):

            net = NEAT.NeuralNetwork()
            genome.BuildPhenotype(net)

            avg_reward = 0

            for trial in range(trials):

                avg_reward += do_trial()

            avg_reward /= trials

            #print(avg_reward)

            genome.SetFitness(1000000 + avg_reward)
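Example #30 is truncated: the top of do_trial() and the setup of env, trials and pop are missing. The per-step pattern it relies on looks roughly like this hypothetical helper (Gym classic-control, discrete actions, old 4-tuple step API):

def select_action(net, observation):
    net.Flush()
    net.Input(list(observation) + [1.0])  # trailing 1.0 as a bias input is an assumption
    net.Activate()
    return int(np.argmax(net.Output()))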