def run(method, level, generations=500, popsize=500, visualize_individual=None):
    shape = (3, 3)
    task = TargetWeightsTask(substrate_shape=shape, noise=level, fitnessmeasure='sqerr')

    substrate = Substrate()
    substrate.add_nodes(shape, 'l')
    substrate.add_connections('l', 'l')

    if method == 'hyperneat':
        geno = lambda: NEATGenotype(feedforward=True,
                                    inputs=len(shape) * 2,
                                    weight_range=(-3.0, 3.0),
                                    prob_add_conn=0.3,
                                    prob_add_node=0.03,
                                    types=['sin', 'linear', 'gauss', 'sigmoid', 'abs'])
        pop = NEATPopulation(geno, popsize=popsize, target_species=8)
        developer = HyperNEATDeveloper(substrate=substrate, add_deltas=False, sandwich=False)

    elif method == '0hn':
        t = [(i, 4) for i in range(4)]
        geno = lambda: NEATGenotype(feedforward=True,
                                    inputs=len(shape) * 2,
                                    weight_range=(-3.0, 3.0),
                                    prob_add_conn=0.0,
                                    prob_add_node=0.0,
                                    topology=t,
                                    types=['sin', 'linear', 'gauss', 'sigmoid', 'abs'])
        pop = NEATPopulation(geno, popsize=popsize, target_species=8)
        developer = HyperNEATDeveloper(substrate=substrate, add_deltas=False, sandwich=False)

    elif method == 'wavelet':
        geno = lambda: WaveletGenotype(inputs=len(shape) * 2)
        pop = SimplePopulation(geno, popsize=popsize)
        developer = WaveletDeveloper(substrate=substrate, add_deltas=False, sandwich=False)

    results = pop.epoch(generations=generations,
                        evaluator=partial(evaluate, task=task, developer=developer))
    return results
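# Hedged usage sketch (not part of the original script): run() dispatches on the
# method string ('hyperneat', '0hn', or 'wavelet') and takes the noise level as its
# second argument. The champion lookup below assumes the stats dict returned by
# pop.epoch(), as used elsewhere in these experiments; the reduced settings are
# only for a quick smoke test.
if __name__ == '__main__':
    for m in ('hyperneat', '0hn', 'wavelet'):
        res = run(m, level=0.1, generations=50, popsize=50)
        print('%s: best fitness %.4f' % (m, res['champions'][-1].stats['fitness']))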
def run(method, setup, generations=250, popsize=100):
    # Create task and genotype->phenotype converter
    size = 11
    task_kwds = dict(size=size)
    if setup == 'big-little':
        task_kwds['targetshape'] = ShapeDiscriminationTask.makeshape('box', size // 3)
        task_kwds['distractorshapes'] = [ShapeDiscriminationTask.makeshape('box', 1)]
    elif setup == 'triup-down':
        task_kwds['targetshape'] = np.triu(np.ones((size // 3, size // 3)))
        task_kwds['distractorshapes'] = [np.tril(np.ones((size // 3, size // 3)))]
    task = ShapeDiscriminationTask(**task_kwds)

    substrate = Substrate()
    substrate.add_nodes((size, size), 'l')
    substrate.add_connections('l', 'l')

    if method == 'wavelet':
        # The developer below adds deltas, so the CPPN takes 6 inputs.
        add_deltas = True
        num_inputs = 6 if add_deltas else 4
        geno = lambda: WaveletGenotype(inputs=num_inputs)
        pop = SimplePopulation(geno, popsize=popsize)
        developer = WaveletDeveloper(substrate=substrate, add_deltas=add_deltas, sandwich=True)
    else:
        geno_kwds = dict(feedforward=True,
                         inputs=6,
                         weight_range=(-3.0, 3.0),
                         prob_add_conn=0.1,
                         prob_add_node=0.03,
                         bias_as_node=False,
                         types=['sin', 'bound', 'linear', 'gauss', 'sigmoid', 'abs'])

        if method == 'nhn':
            pass
        elif method == '0hnmax':
            geno_kwds['max_nodes'] = 7
        elif method == '1hnmax':
            geno_kwds['max_nodes'] = 8

        geno = lambda: NEATGenotype(**geno_kwds)
        pop = NEATPopulation(geno, popsize=popsize, target_species=8)
        developer = HyperNEATDeveloper(substrate=substrate, sandwich=True,
                                       add_deltas=True, node_type='tanh')

    # Run and save results
    results = pop.epoch(generations=generations,
                        evaluator=partial(evaluate, task=task, developer=developer),
                        solution=partial(solve, task=task, developer=developer))
    return results
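# Hedged usage sketch (not in the original file): the experiment is selected by two
# strings, a method ('wavelet', 'nhn', '0hnmax', '1hnmax') and a setup
# ('big-little' or 'triup-down'); the reduced generations/popsize here are only for
# a quick smoke test.
if __name__ == '__main__':
    results = run('nhn', 'big-little', generations=10, popsize=20)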
    image = np.dstack(binary_channels + [red])
    _, encoded = cv2.imencode('.png', image)
    image_bytes = bytearray(np.asarray(encoded))
    client.send("Content-type: image/png\r\n")
    client.send("Content-Length: %d\r\n\r\n" % len(image_bytes))
    client.send(image_bytes)
    client.send("\r\n--" + boundary + "\r\n")


if __name__ == '__main__':
    from peas.methods.neat import NEATPopulation, NEATGenotype

    genotype = lambda: NEATGenotype(inputs=6,
                                    outputs=2,
                                    types=[ACTIVATION_FUNC],
                                    prob_add_node=0.1,
                                    weight_range=(-3, 3),
                                    stdev_mutate_weight=.25,
                                    stdev_mutate_bias=.25,
                                    stdev_mutate_response=.25,
                                    feedforward=False)
    pop = NEATPopulation(genotype, popsize=POPSIZE,
                         target_species=TARGET_SPECIES, stagnation_age=5)

    local_ip = sys.argv[-2]

    log = {'neat': {}, 'generations': []}

    # log neat settings
    dummy_individual = genotype()
#!/usr/bin/env python

### IMPORTS ###
import sys, os
from functools import partial
from collections import defaultdict
sys.path.append(os.path.join(os.path.split(__file__)[0], '..', '..'))

from peas.methods.neat import NEATPopulation, NEATGenotype
# from peas.methods.neatpythonwrapper import NEATPythonPopulation
from peas.tasks.xor import XORTask

# Create a factory for genotypes (i.e. a function that returns a new
# instance each time it is called)
genotype = lambda: NEATGenotype(inputs=2, weight_range=(-3., 3.), types=['tanh'])

# Create a population
pop = NEATPopulation(genotype, popsize=150)

# Create a task
task = XORTask()

nodecounts = defaultdict(int)
for i in range(100):
    # Run the evolution, tell it to use the task as an evaluator
    pop.epoch(generations=100, evaluator=task, solution=task)
    nodecounts[len(pop.champions[-1].node_genes)] += 1

print(sorted(nodecounts.items()))
def run(method, setup, generations=15, popsize=10):
    """Main function that drives the evolution."""
    # The following 'task_kwds' and 'task' variables are left over from a framework
    # example and are not used by this experiment.
    task_kwds = dict(field='eight',
                     observation='eight_striped',
                     max_steps=3000,
                     friction_scale=0.1,
                     damping=0.9,
                     motor_torque=3,
                     check_coverage=True,
                     flush_each_step=False,
                     initial_pos=(17, 256, np.pi * 0.5))  # TODO: remove
    task = DetectorTask(**task_kwds)  # TODO: remove

    # The detector has a specific topology: a 21x21 input layer and a single output node.
    substrate = Substrate()
    substrate.add_nodes([(r, theta) for r in np.linspace(-10, 10, 21)
                         for theta in np.linspace(-10, 10, 21)], 'input', is_input=True)
    substrate.add_nodes([(r, theta) for r in np.linspace(0, 0, 1)
                         for theta in np.linspace(0, 0, 1)], 'output')
    substrate.add_connections('input', 'output', -1)

    # Evolutionary parameters.
    geno_kwds = dict(feedforward=True,
                     inputs=441,
                     outputs=1,
                     max_depth=3,
                     max_nodes=MAXIMUM_NODES,
                     weight_range=(-3.0, 3.0),
                     prob_add_conn=0.3,
                     prob_add_node=0.1,
                     bias_as_node=False,
                     types=['sigmoid'])

    geno = lambda: NEATGenotype(**geno_kwds)
    pop = NEATPopulation(geno, popsize=popsize, target_species=8)

    # Sigmoid activation is only used for the generated ANN, since the 'hnn' package
    # supports only sigmoid activations.
    developer = HyperNEATDeveloper(substrate=substrate, add_deltas=False,
                                   sandwich=False, node_type='sigmoid')

    results = pop.epoch(generations=generations,
                        evaluator=partial(evaluate, task=task, developer=developer),
                        solution=partial(solve, task=task, developer=developer))

    # Write the best solution of every generation to the file 'best_solution_N',
    # where 'N' is the ID of the detector.
    fitnesses = list()
    fifo = open(os.path.join(os.path.dirname(__file__), '../../best_solution_N'), 'a+')
    for champion in results['champions']:
        fitnesses.append(champion.stats['fitness'])
        phenotype = developer.convert(champion)

        # Option to visualise the detector ANN. Use some sort of counter so that the
        # images won't be overwritten:
        # dir = os.path.dirname('../../visual_N.png')
        # if not os.path.exists(dir):
        #     os.makedirs(dir)
        # phenotype.visualize('../../visual_N.png', inputs=441, outputs=1)

        # Encode the champion's node types as float indexes, padding the remaining
        # slots (up to MAXIMUM_NODES) with the value 4.0.
        node_types = champion.get_network_data()[1][1:]
        nodes_types_indexes = []
        rest = MAXIMUM_NODES - len(node_types)
        rest_array = np.linspace(4., 4., rest)
        for idx, node_type in enumerate(node_types):
            if idx == len(node_types) - 2:
                nodes_types_indexes.extend(rest_array)
            try:
                nodes_types_indexes.append(
                    float(['sin', 'bound', 'linear', 'gauss', 'sigmoid', 'abs'].index(node_type)))
            except ValueError:
                print("not defined!")

        fifo.write('fitness: ' + str(champion.stats['fitness']) + ' || ' +
                   ' '.join(map(str, nodes_types_indexes)) + ' ' +
                   ' '.join(map(str, phenotype.get_connectivity_matrix())) + '\n')
    fifo.close()

    # Visualize evolution in a graph.
    import matplotlib.pyplot as plt
    from matplotlib.ticker import MaxNLocator

    plt.figure()
    x = range(len(results['champions']))
    y = np.asarray(fitnesses)
    xa = plt.gca().get_xaxis()
    xa.set_major_locator(MaxNLocator(integer=True))
    plt.plot(x, y)
    plt.axis('on')
    plt.savefig(os.path.join(os.getcwd(), '../../fitness_evolution_N'),
                bbox_inches='tight', pad_inches=0)
    plt.close()

    return results
#!/usr/bin/env python

### IMPORTS ###
import sys, os
from functools import partial
from collections import defaultdict
sys.path.append(os.path.join(os.path.split(__file__)[0], '..', '..'))

from peas.methods.neat import NEATPopulation, NEATGenotype
# from peas.methods.neatpythonwrapper import NEATPythonPopulation
from peas.tasks.xor import XORTask

# Create a factory for genotypes (i.e. a function that returns a new
# instance each time it is called)
genotype = lambda: NEATGenotype(inputs=2, weight_range=(-3., 3.), types=['sigmoid2'])

# Create a population
pop = NEATPopulation(genotype, popsize=150)

# Create a task
task = XORTask()

nodecounts = defaultdict(int)
for i in xrange(100):
    # Run the evolution, tell it to use the task as an evaluator
    pop.epoch(generations=100, evaluator=task, solution=task)
    nodecounts[len(pop.champions[-1].node_genes)] += 1
def run(method, setup, generations=100, popsize=100):
    """ Use hyperneat for a line following task """
    # Create task and genotype->phenotype converter
    if setup == 'easy':
        task_kwds = dict(field='eight',
                         observation='eight',
                         max_steps=3000,
                         friction_scale=0.3,
                         damping=0.3,
                         motor_torque=10,
                         check_coverage=False,
                         flush_each_step=False,
                         initial_pos=(282, 300, np.pi * 0.35))
    elif setup == 'hard':
        task_kwds = dict(field='eight',
                         observation='eight_striped',
                         max_steps=3000,
                         friction_scale=0.3,
                         damping=0.3,
                         motor_torque=10,
                         check_coverage=False,
                         flush_each_step=True,
                         force_global=True,
                         initial_pos=(282, 300, np.pi * 0.35))
    elif setup == 'force':
        task_kwds = dict(field='eight',
                         observation='eight',
                         max_steps=3000,
                         friction_scale=0.1,
                         damping=0.9,
                         motor_torque=3,
                         check_coverage=True,
                         flush_each_step=True,
                         force_global=True,
                         initial_pos=(17, 256, np.pi * 0.5))
    elif setup == 'prop':
        task_kwds = dict(field='eight',
                         observation='eight_striped',
                         max_steps=3000,
                         friction_scale=0.3,
                         damping=0.3,
                         motor_torque=10,
                         check_coverage=False,
                         flush_each_step=False,
                         initial_pos=(282, 300, np.pi * 0.35))
    elif setup == 'cover':
        task_kwds = dict(field='eight',
                         observation='eight_striped',
                         max_steps=3000,
                         friction_scale=0.1,
                         damping=0.9,
                         motor_torque=3,
                         check_coverage=True,
                         flush_each_step=False,
                         initial_pos=(17, 256, np.pi * 0.5))

    task = LineFollowingTask(**task_kwds)

    # The line following experiment has quite a specific topology for its network:
    substrate = Substrate()
    substrate.add_nodes([(0, 0)], 'bias')
    substrate.add_nodes([(r, theta) for r in np.linspace(0, 1, 3)
                         for theta in np.linspace(-1, 1, 5)], 'input')
    substrate.add_nodes([(r, theta) for r in np.linspace(0, 1, 3)
                         for theta in np.linspace(-1, 1, 3)], 'layer')
    substrate.add_connections('input', 'layer', -1)
    substrate.add_connections('bias', 'layer', -2)
    substrate.add_connections('layer', 'layer', -3)

    if method == 'wvl':
        geno = lambda: WaveletGenotype(inputs=4, layers=3)
        pop = SimplePopulation(geno, popsize=popsize)
        developer = WaveletDeveloper(substrate=substrate,
                                     add_deltas=False,
                                     sandwich=False,
                                     node_type='tanh')
    else:
        geno_kwds = dict(feedforward=True,
                         inputs=4,
                         outputs=3,
                         weight_range=(-3.0, 3.0),
                         prob_add_conn=0.1,
                         prob_add_node=0.03,
                         bias_as_node=False,
                         types=['sin', 'bound', 'linear', 'gauss', 'sigmoid', 'abs'])

        if method == 'nhn':
            pass
        elif method == '0hnmax':
            geno_kwds['max_nodes'] = 7
        elif method == '1hnmax':
            geno_kwds['max_nodes'] = 8

        geno = lambda: NEATGenotype(**geno_kwds)
        pop = NEATPopulation(geno, popsize=popsize, target_species=8)
        developer = HyperNEATDeveloper(substrate=substrate,
                                       add_deltas=False,
                                       sandwich=False,
                                       node_type='tanh')

    results = pop.epoch(generations=generations,
                        evaluator=partial(evaluate, task=task, developer=developer),
                        solution=partial(solve, task=task, developer=developer))
    return results
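# Hedged usage sketch (not in the original file): method is one of
# 'wvl', 'nhn', '0hnmax', '1hnmax' and setup one of
# 'easy', 'hard', 'force', 'prop', 'cover'; small settings for a quick check.
if __name__ == '__main__':
    results = run('nhn', 'easy', generations=10, popsize=20)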
    def evaluate(self, network):
        if not isinstance(network, NeuralNetwork):
            network = NeuralNetwork(network)
        # NOTE: the result of the test-mode run is immediately overwritten by the
        # scoring run below.
        score, steps = self._loop(network, test=True)
        score, steps = self._loop(network)
        return {'fitness': score / self.max_score, 'steps': steps}

    def solve(self, network):
        if not isinstance(network, NeuralNetwork):
            network = NeuralNetwork(network)
        score, steps = self._loop(network)
        if score < self.max_score:
            print("Failed... Score: ", score / self.max_score, " in ", steps, " Steps")
            return 0
        return int(score > self.max_score)


genotype = lambda: NEATGenotype(inputs=416,
                                outputs=207,
                                weight_range=(-50., 50.),
                                types=['tanh'])
pop = NEATPopulation(genotype, popsize=10)

eval_task = GymSuperMario(max_steps=100, max_score=2000)
pop.epoch(generations=100, evaluator=eval_task, solution=eval_task)
def run(method, splits, generations=500, popsize=500):
    complexity = 'half'
    splits = int(splits)

    funcs = []
    if complexity in ['half', 'flat', 'slope']:
        funcs.append((True, np.random.random() * 6. - 3))

    for num in range(splits):
        axis = random_direction_vector()
        offset = np.random.random() - 0.2
        where = partial(area, axis=axis, offset=offset)

        if complexity == 'half':
            xs = 0 if num % 2 == 0 else 1
            mp = 1 if (num // 2) % 2 == 0 else -1
            if num < 2:
                d = 0
            elif num < 2 + 4:
                d = 0.5
            elif num < 2 + 4 + 4:
                d = 0.25
            elif num < 2 + 4 + 4 + 4:
                d = 0.75
            where = partial(split, axis=xs, flip=mp, distance=d)

        what = lambda c, v: v + np.random.random() * 6. - 3
        funcs.append((where, what))

    task = TargetWeightsTask(substrate_shape=(8,), funcs=funcs,
                             fitnessmeasure='sqerr', uniquefy=True)

    substrate = Substrate()
    substrate.add_nodes((8,), 'l')
    substrate.add_connections('l', 'l')

    if method == 'hyperneat':
        geno = lambda: NEATGenotype(feedforward=True,
                                    inputs=2,
                                    weight_range=(-3.0, 3.0),
                                    prob_add_conn=0.3,
                                    prob_add_node=0.03,
                                    types=['sin', 'ident', 'gauss', 'sigmoid', 'abs'])
        pop = NEATPopulation(geno, popsize=popsize, target_species=8)
        developer = HyperNEATDeveloper(substrate=substrate, add_deltas=False, sandwich=False)

    elif method == '0hnmax':
        geno = lambda: NEATGenotype(feedforward=True,
                                    inputs=2,
                                    weight_range=(-3.0, 3.0),
                                    max_nodes=3,
                                    types=['sin', 'ident', 'gauss', 'sigmoid', 'abs'])
        pop = NEATPopulation(geno, popsize=popsize, target_species=8)
        developer = HyperNEATDeveloper(substrate=substrate, add_deltas=False, sandwich=False)

    elif method == 'wavelet':
        geno = lambda: WaveletGenotype(inputs=2)
        pop = SimplePopulation(geno, popsize=popsize)
        developer = WaveletDeveloper(substrate=substrate, add_deltas=False, sandwich=False)

    results = pop.epoch(generations=generations,
                        evaluator=partial(evaluate, task=task, developer=developer))
    return results
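# Hedged usage sketch (not part of the original script): the second argument is the
# number of random splits applied to the target weight pattern; methods are
# 'hyperneat', '0hnmax', and 'wavelet'. Reduced settings for a quick check.
if __name__ == '__main__':
    results = run('hyperneat', splits=4, generations=50, popsize=50)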
genotype = lambda innovations={}: NEATGenotype(
    inputs=inputs,
    outputs=outputs,
    weight_range=weight_range,
    types=types,
    innovations=innovations,
    feedforward=feedforward,
    prob_add_node=prob_add_node,
    prob_add_conn=prob_add_conn,
    prob_mutate_weight=prob_mutate_weight,
    prob_reset_weight=prob_reset_weight,
    prob_reenable_conn=prob_reenable_conn,
    prob_disable_conn=prob_disable_conn,
    prob_reenable_parent=prob_reenable_parent,
    prob_mutate_bias=prob_mutate_bias,
    prob_mutate_response=prob_mutate_response,
    prob_mutate_type=prob_mutate_type,
    stdev_mutate_weight=stdev_mutate_weight,
    stdev_mutate_bias=stdev_mutate_bias,
    stdev_mutate_response=stdev_mutate_response,
    phys_dis_neat=phys_dis_neat,
    max_depth=max_depth,
    max_nodes=max_nodes,
    response_default=response_default,
    initial_weight_stdev=initial_weight_stdev,
    bias_as_node=bias_as_node,
    distance_excess=distance_excess,
    distance_disjoint=distance_disjoint,
    distance_weight=distance_weight)

task = XORTask()
def start(self, evaluator, popsize, generations, max_motor_speed, foraging):
    if self.use_hyperneat:
        # HYPERNEAT NOT USED
        genotype = lambda innovations={}: NEATGenotype()
    else:
        genotype = lambda innovations={}: NEATGenotype(
            inputs=task.inputs,
            outputs=task.outputs,
            weight_range=task.weight_range,
            types=task.types,
            innovations=innovations,
            feedforward=task.feedforward,
            prob_add_node=task.prob_add_node,
            prob_add_conn=task.prob_add_conn,
            prob_mutate_weight=task.prob_mutate_weight,
            prob_reset_weight=task.prob_reset_weight,
            prob_reenable_conn=task.prob_reenable_conn,
            prob_disable_conn=task.prob_disable_conn,
            prob_reenable_parent=task.prob_reenable_parent,
            prob_mutate_bias=task.prob_mutate_bias,
            prob_mutate_response=task.prob_mutate_response,
            prob_mutate_type=task.prob_mutate_type,
            stdev_mutate_weight=task.stdev_mutate_weight,
            stdev_mutate_bias=task.stdev_mutate_bias,
            stdev_mutate_response=task.stdev_mutate_response,
            phys_dis_neat=task.phys_dis_neat,
            max_depth=task.max_depth,
            max_nodes=task.max_nodes,
            response_default=task.response_default,
            initial_weight_stdev=task.initial_weight_stdev,
            bias_as_node=task.bias_as_node,
            distance_excess=task.distance_excess,
            distance_disjoint=task.distance_disjoint,
            distance_weight=task.distance_weight)

    pop = NEATPopulation(
        genotype,
        popsize=evaluator.popsize,
        elitism=evaluator.elitism,
        compatibility_threshold=evaluator.compatibility_threshold,
        compatibility_threshold_delta=evaluator.compatibility_threshold_delta,
        target_species=evaluator.target_species,
        min_elitism_size=evaluator.min_elitism_size,
        young_age=evaluator.young_age,
        prob_mutate=evaluator.prob_mutate,
        young_multiplier=evaluator.young_multiplier,
        stagnation_age=evaluator.stagnation_age,
        old_age=evaluator.old_age,
        old_multiplier=evaluator.old_multiplier,
        tournament_selection_k=evaluator.tournament_selection_k,
        reset_innovations=evaluator.reset_innovations,
        survival=evaluator.survival,
        phys_dis_neat=evaluator.phys_dis_neat,
        sim_dis_neat=evaluator.sim_dis_neat,
        ip_address=self.ip)

    # Log NEAT settings.
    log = {'parameters': {}, 'generations': []}
    log['parameters'] = {
        'max_speed': max_motor_speed,
        'inputs': evaluator.inputs,
        'outputs': evaluator.outputs,
        'weight_range': evaluator.weight_range,
        'types': evaluator.types,
        'feedforward': evaluator.feedforward,
        'prob_add_node': evaluator.prob_add_node,
        'prob_add_conn': evaluator.prob_add_conn,
        'prob_mutate_weight': evaluator.prob_mutate_weight,
        'prob_reset_weight': evaluator.prob_reset_weight,
        'prob_reenable_conn': evaluator.prob_reenable_conn,
        'prob_disable_conn': evaluator.prob_disable_conn,
        'prob_reenable_parent': evaluator.prob_reenable_parent,
        'prob_mutate_bias': evaluator.prob_mutate_bias,
        'prob_mutate_response': evaluator.prob_mutate_response,
        'prob_mutate_type': evaluator.prob_mutate_type,
        'stdev_mutate_weight': evaluator.stdev_mutate_weight,
        'stdev_mutate_bias': evaluator.stdev_mutate_bias,
        'stdev_mutate_response': evaluator.stdev_mutate_response,
        'phys_dis_neat': evaluator.phys_dis_neat,
        'max_depth': evaluator.max_depth,
        'max_nodes': evaluator.max_nodes,
        'response_default': evaluator.response_default,
        'initial_weight_stdev': evaluator.initial_weight_stdev,
        'bias_as_node': evaluator.bias_as_node,
        'distance_excess': evaluator.distance_excess,
        'distance_disjoint': evaluator.distance_disjoint,
        'distance_weight': evaluator.distance_weight,
        'popsize': evaluator.popsize,
        'elitism': evaluator.elitism,
        'compatibility_threshold': evaluator.compatibility_threshold,
        'compatibility_threshold_delta': evaluator.compatibility_threshold_delta,
        'target_species': evaluator.target_species,
        'min_elitism_size': evaluator.min_elitism_size,
        'young_age': evaluator.young_age,
        'prob_mutate': evaluator.prob_mutate,
        'young_multiplier': evaluator.young_multiplier,
        'stagnation_age': evaluator.stagnation_age,
        'old_age': evaluator.old_age,
        'old_multiplier': evaluator.old_multiplier,
        'tournament_selection_k': evaluator.tournament_selection_k,
        'reset_innovations': evaluator.reset_innovations,
        'survival': evaluator.survival,
        'sim_dis_neat': evaluator.sim_dis_neat,
        'ip_address': self.ip
    }
    log['parameters'].update(self.evaluator.logs())

    dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
    bus = dbus.SessionBus()
    thymioController = dbus.Interface(
        bus.get_object('ch.epfl.mobots.Aseba', '/'),
        dbus_interface='ch.epfl.mobots.AsebaNetwork')
    thymioController.LoadScripts(AESL_PATH,
                                 reply_handler=dbusReply,
                                 error_handler=dbusError)

    # Switch the Thymio's LEDs off.
    thymioController.SendEventName('SetColor', [0, 0, 0, 0],
                                   reply_handler=dbusReply,
                                   error_handler=dbusError)

    # Thresholds are set here > SHOULD BE IN FORAGING.
    if foraging:
        self.detector = object_detector.ObjectDetector(0.4, 0.01, thymioController)
        self.evaluator.logger.info(
            str(self.ip),
            'Puck_threshold:' + str(self.detector.has_puck_threshold))
        self.evaluator.logger.info(
            str(self.ip),
            'Goal_threshold:' + str(self.detector.has_goal_threshold2))

    task = self.evaluator
    task.set_thymio_controller(thymioController)

    ctrl_serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    ctrl_serversocket.bind((self.ip, 1337))
    ctrl_serversocket.listen(5)
    ctrl_client = None

    img_serversocket = None
    img_client = None

    def set_client():
        global ctrl_client
        print 'Control server: waiting for socket connections...'
        (ctrl_client, address) = ctrl_serversocket.accept()
        task.set_ctrl_client(ctrl_client)
        print 'Control server: got connection from', address

    thread.start_new_thread(set_client, ())

    if self.use_img_client:
        img_serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        img_serversocket.bind((self.ip, 31337))
        img_serversocket.listen(5)

        def set_img_client():
            global img_client
            print 'Image server: waiting for socket connections...'
            (img_client, address) = img_serversocket.accept()
            print 'Image server: got connection from', address
            write_header(img_client)

        thread.start_new_thread(set_img_client, ())

    def epoch_callback(population):
        # Update the log dump.
        population_backup = population.giveBackUp()
        species_backup = population.giveBackUpSpecies()
        generation = {
            'individuals': [],
            'phenotypes': [],
            'gen_number': population.generation
        }
        for individual in population_backup:
            copied_connections = {
                str(key): value
                for key, value in individual.conn_genes.items()
            }
            generation['individuals'].append({
                'node_genes': deepcopy(individual.node_genes),
                'conn_genes': copied_connections,
                'stats': deepcopy(individual.stats)
            })

        if self.use_hyperneat:
            for phenotype in population.phenotype_backup:
                generation['phenotypes'].append({
                    'cm': deepcopy(phenotype.cm),
                    'act': deepcopy(phenotype.act)
                })

        generation['species_id'] = [species.id for species in species_backup]
        generation['species_size'] = [len(species.members) for species in species_backup]
        log['generations'].append(generation)
        # task.getLogger().info(', '.join([str(ind.stats['fitness']) for ind in population_backup.population]))

        outputDir = os.path.join(OUTPUT_PATH, self.experiment_name)
        if population.generation == 1:
            self.firstdate = time.strftime("%d-%m-%y_%H-%M")
        date = time.strftime("%d-%m-%y_%H-%M")
        jsonLogFilename = os.path.join(outputDir, self.experiment_name + '_' + date + '.json')
        with open(jsonLogFilename, 'w') as f:
            # print(outputDir, self.experiment_name + '_' + date + '.json')
            json.dump(log, f, cls=CustomEncoder)

        # Update the clean file summary.
        filename = os.path.join(
            outputDir,
            self.experiment_name + '_' + self.firstdate + '_cleanlog.txt')
        # print filename
        index = 1
        for individual in population_backup:
            msg = "%d\t%d\t%f\t%d\t%d\t%d\t%d\t%d\t%d\t" % (
                population.generation, index, individual.stats['fitness'],
                len(individual.node_genes), len(individual.conn_genes),
                individual.stats['specieid'], individual.stats['id'],
                individual.stats['parent1'], individual.stats['parent2'])
            index += 1
            with open(filename, 'a') as f:
                f.write(msg + "\n")

    try:
        evaluator = lambda evaluee: task.evaluate(NeuralNetwork(evaluee)) \
            if hasattr(evaluee, 'get_network_data') \
            else task.evaluate(evaluee)
        converter = hn.create_converter(task.substrate()) if self.use_hyperneat else (lambda x: x)
        pop.epoch(generations=task.generations,
                  evaluator=evaluator,
                  solution=evaluator,
                  callback=epoch_callback,
                  converter=converter)
    except KeyboardInterrupt:
        release_resources(task.thymioController, ctrl_serversocket, ctrl_client,
                          img_serversocket, img_client)
        sys.exit(1)

    release_resources(task.thymioController, ctrl_serversocket, ctrl_client,
                      img_serversocket, img_client)
    sys.exit(0)
#!/usr/bin/env python

### IMPORTS ###
import sys, os
from functools import partial
sys.path.append(os.path.join(os.path.split(__file__)[0], '..', '..'))

from peas.methods.neat import NEATPopulation, NEATGenotype
# from peas.methods.neatpythonwrapper import NEATPythonPopulation
from peas.tasks.polebalance import PoleBalanceTask

# Create a factory for genotypes (i.e. a function that returns a new
# instance each time it is called)
genotype = lambda: NEATGenotype(inputs=6,
                                weight_range=(-50., 50.),
                                types=['tanh'],
                                feedforward=False)

# Create a population
pop = NEATPopulation(genotype, popsize=200)

# Create a task
dpnv = PoleBalanceTask(velocities=True, max_steps=100000, penalize_oscillation=True)

# Run the evolution, tell it to use the task as an evaluator
pop.epoch(generations=200, evaluator=dpnv, solution=dpnv)
#!/usr/bin/env python

### IMPORTS ###
import sys, os
from functools import partial
sys.path.append(os.path.join(os.path.split(__file__)[0], '..', '..'))

from peas.methods.neat import NEATPopulation, NEATGenotype
# from peas.methods.neatpythonwrapper import NEATPythonPopulation
from peas.tasks.polebalance import PoleBalanceTask

# Create a factory for genotypes (i.e. a function that returns a new
# instance each time it is called)
genotype = lambda: NEATGenotype(inputs=6, weight_range=(-50., 50.), types=['tanh'])

# Create a population
pop = NEATPopulation(genotype, popsize=150)

# Create a task
dpnv = PoleBalanceTask(velocities=True, max_steps=100000, penalize_oscillation=True)

# Run the evolution, tell it to use the task as an evaluator
pop.epoch(generations=100, evaluator=dpnv, solution=dpnv)
def init(reservoir_size=100,
         reservoir_sparsity=0.1,
         input_range=(-1, 1),
         neat_iterations=1000,
         neat_population_size=100,
         esn_repetitions=3,
         initial_weights_std=0.12,
         neat_state_file="neat_progress.pickle",  # neuroevolution state file
         start_anew=True):
    # Either start and initialize a new NEAT run, or load an earlier one to continue it.
    global Scores

    reset_population = start_anew

    if not start_anew:
        # Load an earlier neuroevolution state in order to continue it.
        with open(neat_state_file, "rb") as input_file:
            Scores = dill.load(input_file)
            pop = dill.load(input_file)
            task = dill.load(input_file)
    else:
        # Define node counts for the ESN.
        res_units = reservoir_size
        in_units = 1
        out_units = 301
        ESN_arch = [in_units, res_units, out_units]
        sparsity = reservoir_sparsity

        Scores = {
            "fitness": [],
            "lyapunov": [],
            "mc": [],
            "std_mc": [],
            "mmse": [],
            "std_mmse": [],
            "narma": [],
            "std_narma": [],
            "spectral_radius": []
        }

        topology = random_topology(res_units + in_units, sparsity=sparsity)
        genotype = lambda: NEATGenotype(inputs=in_units,
                                        outputs=0,
                                        topology=topology,
                                        types=['tanh'],
                                        feedforward=False,
                                        prob_add_node=0,
                                        prob_add_conn=0,
                                        # max_nodes=51,
                                        initial_weight_stdev=initial_weights_std,
                                        prob_mutate_weight=0.2,
                                        stdev_mutate_weight=0.02,
                                        prob_reset_weight=0.008,
                                        prob_mutate_bias=0.1,
                                        stdev_mutate_bias=0.01,
                                        bias_as_node=False,
                                        prob_reenable_conn=0,
                                        prob_disable_conn=0,
                                        # prob_reenable_parent=0.25,
                                        # weight_range=(-5., 5.),
                                        # distances for speciation
                                        distance_excess=1.0,
                                        distance_disjoint=1.0,
                                        distance_weight=1.0)

        # Create a population
        pop = NEATPopulation(genotype,
                             popsize=neat_population_size,
                             # effectively a single target species
                             target_species=neat_population_size / neat_population_size,
                             compatibility_threshold=0.01,
                             compatibility_threshold_delta=0.001)

        # Create a task
        task = ESNTask_external(ESN_arch, input_range, esn_repetitions)
        # task = ESNTask_internal()  # alternative, for just testing NEAT

    return task, pop, neat_iterations, reset_population, neat_state_file
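# Hedged sketch of how a caller might wire the returned objects together (the actual
# driver loop, including dill checkpointing of Scores/pop/task, is not shown here and
# may differ):
#
#     task, pop, neat_iterations, reset_population, neat_state_file = init()
#     pop.epoch(generations=neat_iterations, evaluator=task, solution=task)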
from individual import INDIVIDUAL
import copy
import pickle
from population import POPULATION
from environments import ENVIORNMENTS
import constants as c
import datetime
import time
from recorder import Recorder
import sys, os

# Make the peas package importable before importing from it.
sys.path.append(os.path.join(os.path.split("netGenAlg.py")[0], '..', '..'))
import peas.peas as peas
from peas.methods.neat import NEATPopulation, NEATGenotype

genotypes = lambda: NEATGenotype(inputs=c.numOfLegs + 4,
                                 weight_range=(-50, 50),
                                 types=["tanh"])

pop = NEATPopulation(genotypes, popsize=c.popSize)

# NOTE: pop.epoch() requires a task-like evaluator (an object or callable that scores
# a genotype); NEATPopulation has no Evaluate() method, so the call is left commented
# out until a proper evaluator is supplied.
# pop.epoch(generations=c.numGens, evaluator=..., solution=...)

envs = ENVIORNMENTS()

parents = POPULATION(c.popSize)
parents.Initialize()
parents.Evaluate(envs, pb=True, pp=False)
parents.Print()

startTimes = []
copyTimes = []
evalTimes = []