def run(self):
    '''
    Execute the evolutionary search.

    Each generation, every individual's config is dispatched as a remote ray
    trial (deduplicated by config hash so identical configs run once), trial
    fitnesses are fed back into DEAP, and the population is selected and
    varied for the next generation.

    @returns dict of trial_index -> trial data for every trial launched.
    '''
    run_trial = create_remote_fn(self.experiment)
    meta_spec = self.experiment.spec['meta']
    ray.init(**meta_spec.get('resources', {}))
    register_ray_serializer()
    max_generation = meta_spec['max_generation']
    pop_size = meta_spec['max_trial'] or calc_population_size(self.experiment)
    logger.info(f'EvolutionarySearch max_generation: {max_generation}, population size: {pop_size}')
    trial_data_dict = {}
    config_hash = {}  # config hash_str to trial_index; dedupes repeat configs across generations
    toolbox = self.init_deap()
    population = toolbox.population(n=pop_size)
    for gen in range(1, max_generation + 1):
        logger.info(f'Running generation: {gen}/{max_generation}')
        launched_configs = {}  # ray object id -> launched config
        pending_trials = []
        for member in population:
            config = dict(member.items())
            config_key = util.to_json(config, indent=0)
            if config_key not in config_hash:
                # unseen config: assign a fresh trial index and launch it remotely
                trial_index = self.experiment.info_space.tick('trial')['trial']
                config_hash[config_key] = config['trial_index'] = trial_index
                remote_id = run_trial.remote(self.experiment, config)
                launched_configs[remote_id] = config
                pending_trials.append(remote_id)
            member['trial_index'] = config_hash[config_key]
        trial_data_dict.update(get_ray_results(pending_trials, launched_configs))
        for member in population:
            trial_index = member.pop('trial_index')
            # fall back to zero fitness if the trial errored out
            fallback = {'fitness': 0}
            trial_data = trial_data_dict.get(trial_index, fallback)
            member.fitness.values = (trial_data['fitness'],)
        top_lines = ['Fittest of population preview:']
        for member in tools.selBest(population, k=min(10, pop_size)):
            top_lines.append(f'fitness: {member.fitness.values[0]}, {member}')
        logger.info('\n'.join(top_lines))
        # prepare offspring for next generation
        if gen < max_generation:
            population = toolbox.select(population, len(population))
            # Vary the pool of individuals
            population = algorithms.varAnd(population, toolbox, cxpb=0.5, mutpb=0.5)
    ray.shutdown()
    return trial_data_dict
def run(self):
    '''
    Execute the evolutionary search.

    Each generation, every individual's config is dispatched as a remote ray
    trial (deduplicated by config hash so identical configs run once), trial
    fitnesses are fed back into DEAP, and the population is selected and
    varied for the next generation.

    @returns dict of trial_index -> trial data for every trial launched.
    '''
    # FIX: run_trial was used below (run_trial.remote) without ever being
    # defined in this method; create the remote trial fn up front, matching
    # the sibling implementation of run() in this file.
    run_trial = create_remote_fn(self.experiment)
    meta_spec = self.experiment.spec['meta']
    ray.init(**meta_spec.get('resources', {}))
    # NOTE(review): the sibling run() also calls register_ray_serializer()
    # right after ray.init — confirm whether it is needed here too.
    max_generation = meta_spec['max_generation']
    pop_size = meta_spec['max_trial'] or calc_population_size(self.experiment)
    logger.info(f'EvolutionarySearch max_generation: {max_generation}, population size: {pop_size}')
    trial_data_dict = {}
    config_hash = {}  # config hash_str to trial_index; dedupes repeat configs across generations
    toolbox = self.init_deap()
    population = toolbox.population(n=pop_size)
    for gen in range(1, max_generation + 1):
        logger.info(f'Running generation: {gen}/{max_generation}')
        ray_id_to_config = {}
        pending_ids = []
        for individual in population:
            config = dict(individual.items())
            hash_str = util.to_json(config, indent=0)
            if hash_str not in config_hash:
                # unseen config: assign a fresh trial index and launch it remotely
                trial_index = self.experiment.info_space.tick('trial')['trial']
                config_hash[hash_str] = config['trial_index'] = trial_index
                ray_id = run_trial.remote(self.experiment, config)
                ray_id_to_config[ray_id] = config
                pending_ids.append(ray_id)
            individual['trial_index'] = config_hash[hash_str]
        trial_data_dict.update(get_ray_results(pending_ids, ray_id_to_config))
        for individual in population:
            trial_index = individual.pop('trial_index')
            trial_data = trial_data_dict.get(trial_index, {'fitness': 0})  # if trial errored
            individual.fitness.values = trial_data['fitness'],
        preview = 'Fittest of population preview:'
        for individual in tools.selBest(population, k=min(10, pop_size)):
            preview += f'\nfitness: {individual.fitness.values[0]}, {individual}'
        logger.info(preview)
        # prepare offspring for next generation
        if gen < max_generation:
            population = toolbox.select(population, len(population))
            # Vary the pool of individuals
            population = algorithms.varAnd(population, toolbox, cxpb=0.5, mutpb=0.5)
    # FIX: ray.worker.cleanup() was removed from the ray API;
    # ray.shutdown() is the supported teardown call (as used by the sibling run()).
    ray.shutdown()
    return trial_data_dict
def __str__(self):
    '''Return a readable representation: the instance's class attributes serialized to JSON, prefixed with "body: ".'''
    return f'body: {util.to_json(util.get_class_attr(self))}'