import sys, os

sys.path.append(os.path.join(os.path.split(__file__)[0], '..', '..'))
from peas.methods.neat import NEATPopulation, NEATGenotype
from peas.tasks.polebalance import PoleBalanceTask


class DJTempoMatch(object):

    def __init__(self):
        # Create a topology
        inputs = 256
        hiddens = 100
        outputs = 5
        bias = 1
        topology = self._get_topology(inputs=inputs, hiddens=hiddens,
                                      outputs=outputs, bias=bias)

        # Create a factory for genotypes (i.e. a function that returns a new
        # instance each time it is called)
        genotype = lambda: NEATGenotype(inputs=inputs,
                                        outputs=outputs,
                                        feedforward=False,
                                        weight_range=(-1., 1.),
                                        types=['tanh'],
                                        topology=topology,
                                        bias_as_node=bias)

        # Create a population
        self.pop = NEATPopulation(genotype, popsize=10)

        # Create a task
        self.dpnv = PoleBalanceTask(velocities=True, max_steps=10,
                                    penalize_oscillation=True)

    def _get_topology(self, inputs=5, hiddens=3, outputs=2, bias=0):
        topology = []
        # Connect every input (and the bias node) to the first hidden block.
        for i in range(inputs + bias):
            for j in range(inputs + bias, inputs + bias + hiddens):
                topology.append([i, j])
        # Recurrent network: connect the first and second hidden blocks in both directions.
        for i in range(inputs + bias, inputs + bias + hiddens):
            for j in range(inputs + bias + hiddens, inputs + bias + hiddens * 2):
                topology.append([i, j])
                topology.append([j, i])
        # Connect the first hidden block to the outputs.
        for i in range(inputs + bias, inputs + bias + hiddens):
            for j in range(inputs + bias + hiddens * 2, inputs + bias + hiddens * 2 + outputs):
                topology.append([i, j])
        return topology

    def start(self, gen=100):
        # Run the evolution, tell it to use the task as an evaluator
        self.pop.epoch(generations=gen, evaluator=self.dpnv, solution=self.dpnv,
                       callback=self.__new_generation)

    def __new_generation(self, pop):
        print pop.champions[-1]
        pop.champions[-1].visualize("img.png")
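# Hypothetical usage sketch (not part of the original class): instantiate the
# matcher and run a short evolution; the private callback above prints and
# renders each generation's champion.
if __name__ == '__main__':
    matcher = DJTempoMatch()
    matcher.start(gen=10)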
def run(method, level, generations=500, popsize=500, visualize_individual=None):
    shape = (3, 3)
    task = TargetWeightsTask(substrate_shape=shape, noise=level, fitnessmeasure='sqerr')

    substrate = Substrate()
    substrate.add_nodes(shape, 'l')
    substrate.add_connections('l', 'l')

    if method == 'hyperneat':
        geno = lambda: NEATGenotype(feedforward=True,
                                    inputs=len(shape) * 2,
                                    weight_range=(-3.0, 3.0),
                                    prob_add_conn=0.3,
                                    prob_add_node=0.03,
                                    types=['sin', 'linear', 'gauss', 'sigmoid', 'abs'])
        pop = NEATPopulation(geno, popsize=popsize, target_species=8)
        developer = HyperNEATDeveloper(substrate=substrate, add_deltas=False, sandwich=False)

    elif method == '0hn':
        t = [(i, 4) for i in range(4)]
        geno = lambda: NEATGenotype(feedforward=True,
                                    inputs=len(shape) * 2,
                                    weight_range=(-3.0, 3.0),
                                    prob_add_conn=0.0,
                                    prob_add_node=0.00,
                                    topology=t,
                                    types=['sin', 'linear', 'gauss', 'sigmoid', 'abs'])
        pop = NEATPopulation(geno, popsize=popsize, target_species=8)
        developer = HyperNEATDeveloper(substrate=substrate, add_deltas=False, sandwich=False)

    elif method == 'wavelet':
        geno = lambda: WaveletGenotype(inputs=len(shape) * 2)
        pop = SimplePopulation(geno, popsize=popsize)
        developer = WaveletDeveloper(substrate=substrate, add_deltas=False, sandwich=False)

    results = pop.epoch(generations=generations,
                        evaluator=partial(evaluate, task=task, developer=developer))

    return results
        generation['individuals'].append({
            'node_genes': deepcopy(individual.node_genes),
            'conn_genes': copied_connections,
            'stats': deepcopy(individual.stats)
        })

    time_format = time.strftime('%Y%m%d%H%M')
    champion_file = task.experimentName + '_{}_{}_{}.p'.format(
        time_format, commit_sha, population.generation)
    generation['champion_file'] = champion_file
    generation['species'] = [len(species.members) for species in population.species]
    print generation['species']
    log['generations'].append(generation)

    task.getLogger().info(', '.join(
        [str(ind.stats['fitness']) for ind in population.population]))

    jsonLog = open(task.jsonLogFilename, "w")
    json.dump(log, jsonLog)
    jsonLog.close()

    current_champ = population.champions[-1]
    # print 'Champion: ' + str(current_champ.get_network_data())
    # current_champ.visualize(os.path.join(CURRENT_FILE_PATH, 'img/' + task.experimentName + '_%d.jpg' % population.generation))
    pickle.dump(current_champ, file(os.path.join(PICKLED_DIR, champion_file), 'w'))


try:
    pop.epoch(generations=GENERATIONS, evaluator=task, solution=task,
              callback=epoch_callback)
except KeyboardInterrupt:
    release_resources(task.thymioController)
    sys.exit(1)

release_resources(task.thymioController)
sys.exit(0)
### IMPORTS ###
import sys, os
from functools import partial
from collections import defaultdict

sys.path.append(os.path.join(os.path.split(__file__)[0], '..', '..'))
from peas.methods.neat import NEATPopulation, NEATGenotype
# from peas.methods.neatpythonwrapper import NEATPythonPopulation
from peas.tasks.xor import XORTask

# Create a factory for genotypes (i.e. a function that returns a new
# instance each time it is called)
genotype = lambda: NEATGenotype(inputs=2, weight_range=(-3., 3.), types=['tanh'])

# Create a population
pop = NEATPopulation(genotype, popsize=150)

# Create a task
task = XORTask()

nodecounts = defaultdict(int)
for i in range(100):
    # Run the evolution, tell it to use the task as an evaluator
    pop.epoch(generations=100, evaluator=task, solution=task)
    nodecounts[len(pop.champions[-1].node_genes)] += 1

print(sorted(nodecounts.items()))
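# Hedged follow-up sketch (not in the original script): re-score the latest
# champion on the XOR task directly. It assumes the NeuralNetwork wrapper in
# peas.networks.rnn; depending on the peas version, evaluate() returns either a
# plain fitness value or a stats dict.
from peas.networks.rnn import NeuralNetwork

champion = pop.champions[-1]
net = NeuralNetwork(champion)
print(task.evaluate(net))   # fitness of the champion on XOR
print(task.solve(net))      # truthy if the champion meets the solve criterion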
def run(method, setup, generations=15, popsize=10):
    """Main function that drives the evolution."""
    # The 'task_kwds' and 'task' variables below come from a framework example
    # and are not actually used by the detector; they are kept only until they
    # can be removed safely.
    task_kwds = dict(field='eight',
                     observation='eight_striped',
                     max_steps=3000,
                     friction_scale=0.1,
                     damping=0.9,
                     motor_torque=3,
                     check_coverage=True,
                     flush_each_step=False,
                     initial_pos=(17, 256, np.pi * 0.5))  # TODO: remove
    task = DetectorTask(**task_kwds)  # TODO: remove

    # The detector has a specific topology: a 21x21 input layer and a single output node.
    substrate = Substrate()
    substrate.add_nodes([(r, theta) for r in np.linspace(-10, 10, 21)
                         for theta in np.linspace(-10, 10, 21)], 'input', is_input=True)
    substrate.add_nodes([(r, theta) for r in np.linspace(0, 0, 1)
                         for theta in np.linspace(0, 0, 1)], 'output')
    substrate.add_connections('input', 'output', -1)

    # Evolutionary parameters
    geno_kwds = dict(feedforward=True,
                     inputs=441,
                     outputs=1,
                     max_depth=3,
                     max_nodes=MAXIMUM_NODES,
                     weight_range=(-3.0, 3.0),
                     prob_add_conn=0.3,
                     prob_add_node=0.1,
                     bias_as_node=False,
                     types=['sigmoid'])
    geno = lambda: NEATGenotype(**geno_kwds)
    pop = NEATPopulation(geno, popsize=popsize, target_species=8)

    # Sigmoid activation is used only for the generated ANN, since the 'hnn'
    # package supports sigmoid activations exclusively.
    developer = HyperNEATDeveloper(substrate=substrate, add_deltas=False,
                                   sandwich=False, node_type='sigmoid')

    results = pop.epoch(generations=generations,
                        evaluator=partial(evaluate, task=task, developer=developer),
                        solution=partial(solve, task=task, developer=developer),
                        )

    # Write the best solution from every generation to a file 'best_solution_N',
    # where 'N' is the ID of the detector.
    fitnesses = list()
    fifo = open(os.path.join(os.path.dirname(__file__), '../../best_solution_N'), 'a+')
    for champion in results['champions']:
        fitnesses.append(champion.stats['fitness'])
        phenotype = developer.convert(champion)

        # Option to visualise the detector ANN. Use some sort of counter so that
        # the images will not be overwritten:
        # dir = os.path.dirname('../../visual_N.png')
        # if not os.path.exists(dir):
        #     os.makedirs(dir)
        # phenotype.visualize('../../visual_N.png', inputs=441, outputs=1)

        # Encode the champion's node types as indices, padded up to MAXIMUM_NODES.
        nodes_types_indexes = []
        rest = MAXIMUM_NODES - len((champion.get_network_data()[1])[1:])
        rest_array = np.linspace(4., 4., rest)
        for idx, node_type in enumerate((champion.get_network_data()[1])[1:]):
            if idx == len((champion.get_network_data()[1])[1:]) - 2:
                nodes_types_indexes.extend(rest_array)
            try:
                nodes_types_indexes.append(
                    float(['sin', 'bound', 'linear', 'gauss', 'sigmoid', 'abs'].index(node_type)))
            except NameError:
                print "not defined!"

        fifo.write('fitness: ' + str(champion.stats['fitness']) + ' || ' +
                   ' '.join(map(str, nodes_types_indexes)) + ' ' +
                   ' '.join(map(str, phenotype.get_connectivity_matrix())) + '\n')
    fifo.close()

    # Visualize the fitness over generations in a graph.
    import matplotlib.pyplot as plt
    from matplotlib.ticker import MaxNLocator

    plt.figure()
    x = range(len(results['champions']))
    y = np.asarray(fitnesses)
    xa = plt.gca().get_xaxis()
    xa.set_major_locator(MaxNLocator(integer=True))
    plt.plot(x, y)
    plt.axis('on')
    plt.savefig(os.path.join(os.getcwd(), '../../fitness_evolution_N'),
                bbox_inches='tight', pad_inches=0)
    plt.close()

    return results
### IMPORTS ###
import sys, os
from functools import partial
from collections import defaultdict

sys.path.append(os.path.join(os.path.split(__file__)[0], '..', '..'))
from peas.methods.neat import NEATPopulation, NEATGenotype
# from peas.methods.neatpythonwrapper import NEATPythonPopulation
from peas.tasks.xor import XORTask

# Create a factory for genotypes (i.e. a function that returns a new
# instance each time it is called)
genotype = lambda: NEATGenotype(inputs=2, weight_range=(-3., 3.), types=['sigmoid2'])

# Create a population
pop = NEATPopulation(genotype, popsize=150)

# Create a task
task = XORTask()

nodecounts = defaultdict(int)
for i in xrange(100):
    # Run the evolution, tell it to use the task as an evaluator
    pop.epoch(generations=100, evaluator=task, solution=task)
    nodecounts[len(pop.champions[-1].node_genes)] += 1

print sorted(nodecounts.items())
def run(method, splits, generations=500, popsize=500):
    complexity = 'half'
    splits = int(splits)

    funcs = []
    if complexity in ['half', 'flat', 'slope']:
        funcs.append((True, np.random.random() * 6. - 3))

    for num in range(splits):
        axis = random_direction_vector()
        offset = np.random.random() - 0.2
        where = partial(area, axis=axis, offset=offset)
        if complexity == 'half':
            xs = 0 if num % 2 == 0 else 1
            mp = 1 if (num // 2) % 2 == 0 else -1
            if num < 2:
                d = 0
            elif num < 2 + 4:
                d = 0.5
            elif num < 2 + 4 + 4:
                d = 0.25
            elif num < 2 + 4 + 4 + 4:
                d = 0.75
            where = partial(split, axis=xs, flip=mp, distance=d)
        what = lambda c, v: v + np.random.random() * 6. - 3
        funcs.append((where, what))

    task = TargetWeightsTask(substrate_shape=(8,), funcs=funcs,
                             fitnessmeasure='sqerr', uniquefy=True)

    substrate = Substrate()
    substrate.add_nodes((8,), 'l')
    substrate.add_connections('l', 'l')

    if method == 'hyperneat':
        geno = lambda: NEATGenotype(feedforward=True,
                                    inputs=2,
                                    weight_range=(-3.0, 3.0),
                                    prob_add_conn=0.3,
                                    prob_add_node=0.03,
                                    types=['sin', 'ident', 'gauss', 'sigmoid', 'abs'])
        pop = NEATPopulation(geno, popsize=popsize, target_species=8)
        developer = HyperNEATDeveloper(substrate=substrate, add_deltas=False, sandwich=False)

    elif method == '0hnmax':
        geno = lambda: NEATGenotype(feedforward=True,
                                    inputs=2,
                                    weight_range=(-3.0, 3.0),
                                    max_nodes=3,
                                    types=['sin', 'ident', 'gauss', 'sigmoid', 'abs'])
        pop = NEATPopulation(geno, popsize=popsize, target_species=8)
        developer = HyperNEATDeveloper(substrate=substrate, add_deltas=False, sandwich=False)

    elif method == 'wavelet':
        geno = lambda: WaveletGenotype(inputs=2)
        pop = SimplePopulation(geno, popsize=popsize)
        developer = WaveletDeveloper(substrate=substrate, add_deltas=False, sandwich=False)

    results = pop.epoch(generations=generations,
                        evaluator=partial(evaluate, task=task, developer=developer),
                        )

    return results
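# Hypothetical invocation of run() above (the argument values are illustrative
# only): compare the HyperNEAT encoding with the wavelet encoding on a
# four-split target pattern.
results_hyperneat = run('hyperneat', splits=4, generations=100, popsize=100)
results_wavelet = run('wavelet', splits=4, generations=100, popsize=100)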
def start(self, evaluator, popsize, generations, max_motor_speed, foraging):
    if self.use_hyperneat:
        # HyperNEAT is not used.
        genotype = lambda innovations={}: NEATGenotype()
    else:
        genotype = lambda innovations={}: NEATGenotype(
            inputs=task.inputs,
            outputs=task.outputs,
            weight_range=task.weight_range,
            types=task.types,
            innovations=innovations,
            feedforward=task.feedforward,
            prob_add_node=task.prob_add_node,
            prob_add_conn=task.prob_add_conn,
            prob_mutate_weight=task.prob_mutate_weight,
            prob_reset_weight=task.prob_reset_weight,
            prob_reenable_conn=task.prob_reenable_conn,
            prob_disable_conn=task.prob_disable_conn,
            prob_reenable_parent=task.prob_reenable_parent,
            prob_mutate_bias=task.prob_mutate_bias,
            prob_mutate_response=task.prob_mutate_response,
            prob_mutate_type=task.prob_mutate_type,
            stdev_mutate_weight=task.stdev_mutate_weight,
            stdev_mutate_bias=task.stdev_mutate_bias,
            stdev_mutate_response=task.stdev_mutate_response,
            phys_dis_neat=task.phys_dis_neat,
            max_depth=task.max_depth,
            max_nodes=task.max_nodes,
            response_default=task.response_default,
            initial_weight_stdev=task.initial_weight_stdev,
            bias_as_node=task.bias_as_node,
            distance_excess=task.distance_excess,
            distance_disjoint=task.distance_disjoint,
            distance_weight=task.distance_weight)

    pop = NEATPopulation(
        genotype,
        popsize=evaluator.popsize,
        elitism=evaluator.elitism,
        compatibility_threshold=evaluator.compatibility_threshold,
        compatibility_threshold_delta=evaluator.compatibility_threshold_delta,
        target_species=evaluator.target_species,
        min_elitism_size=evaluator.min_elitism_size,
        young_age=evaluator.young_age,
        prob_mutate=evaluator.prob_mutate,
        young_multiplier=evaluator.young_multiplier,
        stagnation_age=evaluator.stagnation_age,
        old_age=evaluator.old_age,
        old_multiplier=evaluator.old_multiplier,
        tournament_selection_k=evaluator.tournament_selection_k,
        reset_innovations=evaluator.reset_innovations,
        survival=evaluator.survival,
        phys_dis_neat=evaluator.phys_dis_neat,
        sim_dis_neat=evaluator.sim_dis_neat,
        ip_address=self.ip)

    # Log the NEAT settings.
    log = {'parameters': {}, 'generations': []}
    log['parameters'] = {
        'max_speed': max_motor_speed,
        'inputs': evaluator.inputs,
        'outputs': evaluator.outputs,
        'weight_range': evaluator.weight_range,
        'types': evaluator.types,
        'feedforward': evaluator.feedforward,
        'prob_add_node': evaluator.prob_add_node,
        'prob_add_conn': evaluator.prob_add_conn,
        'prob_mutate_weight': evaluator.prob_mutate_weight,
        'prob_reset_weight': evaluator.prob_reset_weight,
        'prob_reenable_conn': evaluator.prob_reenable_conn,
        'prob_disable_conn': evaluator.prob_disable_conn,
        'prob_reenable_parent': evaluator.prob_reenable_parent,
        'prob_mutate_bias': evaluator.prob_mutate_bias,
        'prob_mutate_response': evaluator.prob_mutate_response,
        'prob_mutate_type': evaluator.prob_mutate_type,
        'stdev_mutate_weight': evaluator.stdev_mutate_weight,
        'stdev_mutate_bias': evaluator.stdev_mutate_bias,
        'stdev_mutate_response': evaluator.stdev_mutate_response,
        'phys_dis_neat': evaluator.phys_dis_neat,
        'max_depth': evaluator.max_depth,
        'max_nodes': evaluator.max_nodes,
        'response_default': evaluator.response_default,
        'initial_weight_stdev': evaluator.initial_weight_stdev,
        'bias_as_node': evaluator.bias_as_node,
        'distance_excess': evaluator.distance_excess,
        'distance_disjoint': evaluator.distance_disjoint,
        'distance_weight': evaluator.distance_weight,
        'popsize': evaluator.popsize,
        'elitism': evaluator.elitism,
        'compatibility_threshold': evaluator.compatibility_threshold,
        'compatibility_threshold_delta': evaluator.compatibility_threshold_delta,
        'target_species': evaluator.target_species,
        'min_elitism_size': evaluator.min_elitism_size,
        'young_age': evaluator.young_age,
        'prob_mutate': evaluator.prob_mutate,
        'young_multiplier': evaluator.young_multiplier,
        'stagnation_age': evaluator.stagnation_age,
        'old_age': evaluator.old_age,
        'old_multiplier': evaluator.old_multiplier,
        'tournament_selection_k': evaluator.tournament_selection_k,
        'reset_innovations': evaluator.reset_innovations,
        'survival': evaluator.survival,
        'sim_dis_neat': evaluator.sim_dis_neat,
        'ip_address': self.ip
    }
    log['parameters'].update(self.evaluator.logs())

    dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
    bus = dbus.SessionBus()
    thymioController = dbus.Interface(
        bus.get_object('ch.epfl.mobots.Aseba', '/'),
        dbus_interface='ch.epfl.mobots.AsebaNetwork')
    thymioController.LoadScripts(AESL_PATH,
                                 reply_handler=dbusReply,
                                 error_handler=dbusError)

    # Switch the Thymio LEDs off.
    thymioController.SendEventName('SetColor', [0, 0, 0, 0],
                                   reply_handler=dbusReply,
                                   error_handler=dbusError)

    # Detection thresholds are set here (TODO: this should live in the foraging setup).
    if foraging:
        self.detector = object_detector.ObjectDetector(0.4, 0.01, thymioController)
        self.evaluator.logger.info(
            str(self.ip),
            'Puck_threshold:' + str(self.detector.has_puck_threshold))
        self.evaluator.logger.info(
            str(self.ip),
            'Goal_threshold:' + str(self.detector.has_goal_threshold2))

    task = self.evaluator
    task.set_thymio_controller(thymioController)

    ctrl_serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    ctrl_serversocket.bind((self.ip, 1337))
    ctrl_serversocket.listen(5)
    ctrl_client = None

    img_serversocket = None
    img_client = None

    def set_client():
        global ctrl_client
        print 'Control server: waiting for socket connections...'
        (ctrl_client, address) = ctrl_serversocket.accept()
        task.set_ctrl_client(ctrl_client)
        print 'Control server: got connection from', address

    thread.start_new_thread(set_client, ())

    if self.use_img_client:
        img_serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        img_serversocket.bind((self.ip, 31337))
        img_serversocket.listen(5)

        def set_img_client():
            global img_client
            print 'Image server: waiting for socket connections...'
            (img_client, address) = img_serversocket.accept()
            print 'Image server: got connection from', address
            write_header(img_client)

        thread.start_new_thread(set_img_client, ())

    def epoch_callback(population):
        # Update the log dump.
        population_backup = population.giveBackUp()
        species_backup = population.giveBackUpSpecies()
        generation = {
            'individuals': [],
            'phenotypes': [],
            'gen_number': population.generation
        }
        for individual in population_backup:
            copied_connections = {
                str(key): value
                for key, value in individual.conn_genes.items()
            }
            generation['individuals'].append({
                'node_genes': deepcopy(individual.node_genes),
                'conn_genes': copied_connections,
                'stats': deepcopy(individual.stats)
            })

        if self.use_hyperneat:
            for phenotype in population.phenotype_backup:
                generation['phenotypes'].append({
                    'cm': deepcopy(phenotype.cm),
                    'act': deepcopy(phenotype.act)
                })

        generation['species_id'] = [species.id for species in species_backup]
        generation['species_size'] = [len(species.members) for species in species_backup]
        log['generations'].append(generation)
        # task.getLogger().info(', '.join([str(ind.stats['fitness']) for ind in population_backup.population]))

        outputDir = os.path.join(OUTPUT_PATH, self.experiment_name)
        if population.generation == 1:
            self.firstdate = time.strftime("%d-%m-%y_%H-%M")
        date = time.strftime("%d-%m-%y_%H-%M")
        jsonLogFilename = os.path.join(
            outputDir, self.experiment_name + '_' + date + '.json')
        with open(jsonLogFilename, 'w') as f:
            # print(outputDir, self.experiment_name + '_' + date + '.json')
            json.dump(log, f, cls=CustomEncoder)

        # Update the clean summary file.
        filename = os.path.join(
            outputDir,
            self.experiment_name + '_' + self.firstdate + '_cleanlog.txt')
        # print filename
        index = 1
        for individual in population_backup:
            msg = "%d\t%d\t%f\t%d\t%d\t%d\t%d\t%d\t%d\t" % (
                population.generation, index, individual.stats['fitness'],
                len(individual.node_genes), len(individual.conn_genes),
                individual.stats['specieid'], individual.stats['id'],
                individual.stats['parent1'], individual.stats['parent2'])
            index += 1
            with open(filename, 'a') as f:
                f.write(msg + "\n")

    try:
        evaluator = lambda evaluee: task.evaluate(NeuralNetwork(evaluee)) \
            if hasattr(evaluee, 'get_network_data') \
            else task.evaluate(evaluee)
        converter = hn.create_converter(task.substrate()) if self.use_hyperneat else lambda x: x
        pop.epoch(generations=task.generations,
                  evaluator=evaluator,
                  solution=evaluator,
                  callback=epoch_callback,
                  converter=converter)
    except KeyboardInterrupt:
        release_resources(task.thymioController, ctrl_serversocket, ctrl_client,
                          img_serversocket, img_client)
        sys.exit(1)

    release_resources(task.thymioController, ctrl_serversocket, ctrl_client,
                      img_serversocket, img_client)
    sys.exit(0)
#!/usr/bin/env python

### IMPORTS ###
import sys, os
from functools import partial

sys.path.append(os.path.join(os.path.split(__file__)[0], '..', '..'))
from peas.methods.neat import NEATPopulation, NEATGenotype
# from peas.methods.neatpythonwrapper import NEATPythonPopulation
from peas.tasks.polebalance import PoleBalanceTask

# Create a factory for genotypes (i.e. a function that returns a new
# instance each time it is called)
genotype = lambda: NEATGenotype(inputs=6,
                                weight_range=(-50., 50.),
                                types=['tanh'],
                                feedforward=False)

# Create a population
pop = NEATPopulation(genotype, popsize=200)

# Create a task
dpnv = PoleBalanceTask(velocities=True,
                       max_steps=100000,
                       penalize_oscillation=True)

# Run the evolution, tell it to use the task as an evaluator
pop.epoch(generations=200, evaluator=dpnv, solution=dpnv)
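# Optional follow-up sketch (an assumption, not part of the original script):
# once epoch() returns, report the champion's fitness and network size.
champ = pop.champions[-1]
print('champion fitness: %s' % champ.stats['fitness'])
print('nodes: %d, connections: %d' % (len(champ.node_genes), len(champ.conn_genes)))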
#!/usr/bin/env python

### IMPORTS ###
import sys, os
from functools import partial

sys.path.append(os.path.join(os.path.split(__file__)[0], '..', '..'))
from peas.methods.neat import NEATPopulation, NEATGenotype
# from peas.methods.neatpythonwrapper import NEATPythonPopulation
from peas.tasks.polebalance import PoleBalanceTask

# Create a factory for genotypes (i.e. a function that returns a new
# instance each time it is called)
genotype = lambda: NEATGenotype(inputs=6, weight_range=(-50., 50.), types=['tanh'])

# Create a population
pop = NEATPopulation(genotype, popsize=150)

# Create a task
dpnv = PoleBalanceTask(velocities=True,
                       max_steps=100000,
                       penalize_oscillation=True)

# Run the evolution, tell it to use the task as an evaluator
pop.epoch(generations=100, evaluator=dpnv, solution=dpnv)
import constants as c
import datetime
import time
from recorder import Recorder

import sys, os
sys.path.append(os.path.join(os.path.split(__file__)[0], '..', '..'))
# import peas.peas as peas
from peas.methods.neat import NEATPopulation, NEATGenotype

# Genotype factory: one input per leg plus four extra sensor inputs.
genotypes = lambda: NEATGenotype(inputs=c.numOfLegs + 4,
                                 weight_range=(-50, 50),
                                 types=["tanh"])
pop = NEATPopulation(genotypes, popsize=c.popSize)

# NOTE: pop.epoch() expects a task object (or a callable) as its evaluator, as
# in the other scripts above; 'gait_task' below is only a placeholder name for
# the evaluation task this experiment still needs to supply.
# pop.epoch(generations=c.numGens, evaluator=gait_task, solution=gait_task)

envs = ENVIORNMENTS()
parents = POPULATION(c.popSize)
parents.Initialize()
parents.Evaluate(envs, pb=True, pp=False)
parents.Print()

startTimes = []
copyTimes = []
evalTimes = []
generations = []
fillTimes = []

evolution_metrics = {
    'fitness': [],