def init_process(self):
    """
    This function is used to initialize the pool so each process has its own
    instance of the evaluator
    :return:
    """
    global evaluator
    evaluator = Evaluator(self.parameters)
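# Why a pool initializer: objects like the Evaluator are typically expensive to
# build and may not be picklable, so each worker process constructs its own copy
# once and keeps it in a module-level global instead of receiving it through the
# task arguments. Below is a minimal, self-contained sketch of the same pattern;
# the Worker class, _init_worker and _task names are hypothetical and not part of
# this codebase.
import multiprocessing as mp

_worker_state = None  # per-process instance, set by the initializer


class Worker:
    """Stand-in for an expensive, per-process object such as an evaluator."""

    def __init__(self, offset):
        self.offset = offset

    def run(self, x):
        return x * x + self.offset


def _init_worker(offset):
    # Runs once in every pool process; the global lives only in that process.
    global _worker_state
    _worker_state = Worker(offset)


def _task(x):
    # Reuses the per-process Worker instead of rebuilding it for every item.
    return _worker_state.run(x)


if __name__ == '__main__':
    with mp.Pool(processes=4, initializer=_init_worker, initargs=(10,)) as pool:
        print(pool.map(_task, range(8)))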
def __init__(self, parameters):
    self.parameters = parameters
    self.bd_extractor = BehaviorDescriptor(self.parameters)
    self.generation = 1

    if self.parameters.multiprocesses:
        global main_pool
        main_pool = mp.Pool(initializer=self.init_process,
                            processes=self.parameters.multiprocesses)
    self.evaluator = Evaluator(self.parameters)

    self.population = Population(self.parameters,
                                 init_size=self.parameters.pop_size)
    self.init_pop = True
    self.offsprings = None

    if self.parameters.exp_type == 'NS':
        self.evolver = NoveltySearch(self.parameters)
    elif self.parameters.exp_type == 'SIGN':
        self.evolver = NoveltySearch(self.parameters)
    elif self.parameters.exp_type == 'CMA-ES':
        self.evolver = CMAES(self.parameters)
        # Generate CMA-ES initial population
        del self.population
        self.population = Population(self.parameters,
                                     init_size=self.parameters.emitter_population)
        for agent in self.population:
            agent['genome'] = self.evolver.optimizer.ask()
    elif self.parameters.exp_type == 'CMA-NS':
        self.evolver = CMANS(self.parameters)
        self.reward_archive = self.evolver.rew_archive
    elif self.parameters.exp_type == 'NSGA-II':
        self.evolver = NSGAII(self.parameters)
    elif self.parameters.exp_type == 'SERENE':
        self.evolver = SERENE(self.parameters)
    elif self.parameters.exp_type == 'ME':
        self.evolver = MAPElites(self.parameters)
    elif self.parameters.exp_type == 'CMA-ME':
        self.evolver = CMAME(self.parameters)
    elif self.parameters.exp_type == 'RND':
        self.evolver = RandomSearch(self.parameters)
    else:
        raise ValueError("Experiment type {} not implemented.".format(
            self.parameters.exp_type))
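# The if/elif chain above could equivalently be written as a lookup table, which
# keeps the experiment names in one place. This is only a sketch under the
# assumption that every evolver constructor takes just `parameters` (the extra
# CMA-ES population setup would still need to be handled separately); EVOLVERS
# and make_evolver are illustrative names, not part of this codebase.
EVOLVERS = {
    'NS': NoveltySearch,
    'SIGN': NoveltySearch,
    'CMA-ES': CMAES,
    'CMA-NS': CMANS,
    'NSGA-II': NSGAII,
    'SERENE': SERENE,
    'ME': MAPElites,
    'CMA-ME': CMAME,
    'RND': RandomSearch,
}


def make_evolver(parameters):
    try:
        return EVOLVERS[parameters.exp_type](parameters)
    except KeyError:
        raise ValueError("Experiment type {} not implemented.".format(
            parameters.exp_type))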
def __init__(self, parameters):
    self.parameters = parameters
    self.bd_extractor = BehaviorDescriptor(self.parameters)
    self.generation = 0

    if self.parameters.multiprocesses:
        global main_pool
        main_pool = mp.Pool(initializer=self.init_process,
                            processes=self.parameters.multiprocesses)
    else:
        self.evaluator = Evaluator(self.parameters)

    self.evolver = NoveltySearch(self.parameters)
    self.population = Population(self.parameters,
                                 init_size=self.parameters.pop_size)
    self.offsprings = None
    self.ns_archive = self.evolver.archive
from core import Board, Player, Evaluator


def show_board(board):
    # Count stones: black is stored as -1, white as 1.
    black_stonenum = len([1 for i in board.stones if i == -1])
    white_stonenum = len([1 for i in board.stones if i == 1])
    print("black: {0} vs {1} :white".format(black_stonenum, white_stonenum))
    # Display black as "2", white as "1" and empty squares as a blank.
    dispboard = ["2" if stone == -1 else "1" if stone == 1 else " "
                 for stone in board.stones]
    # Print the 8x8 board one row at a time.
    for i in range(0, 8):
        offset = i * 8
        print(dispboard[offset:offset + 8])


if __name__ == '__main__':
    evaluator = Evaluator()
    player = Player(evaluator)
    board = Board()
    # Alternate black (-1) and white (1) moves until the game is over.
    while not board.check_gameover():
        mpos = player.move(board, -1)
        show_board(board)
        epos = player.move(board, 1)
        show_board(board)
class EvalArchive(object):
    """
    This class is used to evaluate the archive of an experiment
    """
    def __init__(self, exp_path, multip=False, agents=None):
        """
        Constructor
        """
        self.params = parameters.Params()
        self.params.load(os.path.join(exp_path, '_params.json'))
        self.exp_path = exp_path
        self.agents = agents
        self.bd_extractor = BehaviorDescriptor(self.params)
        self.traj_to_obs = registered_envs[self.params.env_name]['traj_to_obs']

        self.mp = multip
        if self.mp:
            global main_pool
            main_pool = mp.Pool(initializer=self.init_process,
                                processes=int(os.cpu_count()))
        else:
            self.evaluator = Evaluator(self.params)

        if not os.path.exists(os.path.join(self.exp_path, 'analyzed_data')):
            os.mkdir(os.path.join(self.exp_path, 'analyzed_data'))

    def init_process(self):
        """
        This function is used to initialize the pool so each process has its own
        instance of the evaluator
        :return:
        """
        global evaluator
        evaluator = Evaluator(self.params)

    def _get_eval_traj(self, genome):
        """
        This function feeds the archive genome to the evaluator to get the traj
        of observations
        :param genome:
        :return:
        """
        global evaluator
        _, data_traj = evaluator.evaluate({'genome': genome})
        obs_traj = self.traj_to_obs(data_traj)
        infos = []
        for idx, t in enumerate(data_traj):
            infos.append(t[3])
        return (obs_traj, infos)

    def load_eval_archive(self, generation=None):
        """
        This function loads and evaluates the archives in the exp folder
        :param generation: Generation to evaluate. If None, evaluates the archive
         for all the generations
        :return: Trajectory of observations
        """
        genomes = utils.load_arch_data(self.exp_path,
                                       info=['genome'],
                                       generation=generation,
                                       params=self.params)

        if self.agents is not None:
            # Subsample the archive so at most self.agents genomes are evaluated
            # per generation.
            for generation in genomes:
                idx = list(range(len(genomes[generation]['genome'])))
                np.random.shuffle(idx)
                genomes[generation]['genome'] = [
                    genomes[generation]['genome'][i] for i in idx[:self.agents]
                ]

        gen_obs_traj = {}
        gen_info_traj = {}

        if generation is None:
            bar = Bar('Generations:',
                      max=len(genomes),
                      suffix='[%(index)d/%(max)d] - Avg time per epoch: %(avg).3fs - Elapsed: %(elapsed_td)s')
        else:
            bar = None

        global main_pool
        print("Starting evaluation...")
        for gen in genomes:
            if bar is not None:
                bar.next()

            if self.mp:
                trajs = main_pool.map(self._get_eval_traj, genomes[gen]['genome'])
                obs_trajs = [t[0] for t in trajs]
                info_trajs = [t[1] for t in trajs]
            else:
                obs_trajs = []
                info_trajs = []
                for genome in genomes[gen]['genome']:
                    _, data_traj = self.evaluator.evaluate(
                        {'genome': genome})  # , action_coupled=self.action_coupled)
                    obs_trajs.append(self.traj_to_obs(data_traj))
                    info_trajs.append([t[3] for t in data_traj])

            gen_obs_traj[gen] = obs_trajs
            gen_info_traj[gen] = info_trajs
        print("Done")
        return gen_obs_traj, gen_info_traj

    def get_metrics(self, observations, infos, generation=None):
        """
        This function calculates the metrics used for plotting.
        :param observations: observation trajectories of all the agents in the run
        :param infos: info dicts collected along the trajectories
        :param generation: Generation from which the trajectories are from
        :return:
        """
        ts_bins = np.linspace(0, 1, num=100, endpoint=True)
        max_len = registered_envs[self.params.env_name]['max_steps']
        grid_parameters = registered_envs[self.params.env_name]['grid']

        # GT_BD is the ground truth bd that is used to calculate the CVG
        if self.params.env_name == 'Dummy':
            self.gt_bd_extractor = dummy_gt_bd
        elif self.params.env_name == 'Walker2D':
            self.gt_bd_extractor = dummy_gt_bd

        descriptors = np.array([
            self.gt_bd_extractor(obs, info, max_len)
            for obs, info in zip(observations[generation], infos[generation])
        ])

        if generation is None:
            name_gt_bd = 'gt_bd_all_gens.pkl'
        else:
            name_gt_bd = 'gt_bd_gen_{}.pkl'.format(generation)
        with open(os.path.join(self.exp_path, 'analyzed_data', name_gt_bd), 'wb') as f:
            pkl.dump(descriptors, f)

        print('Calculating CVG and UNIF')
        hist, grid = utils.get_grid(descriptors, grid_parameters)
        cvg = utils.calculate_coverage(grid)
        unif = utils.calculate_uniformity(hist)

        if generation is None:
            name_cvg = "cvg_all_gens.pkl"
            name_unif = "unif_all_gens.pkl"
        else:
            name_cvg = "cvg_gen_{}.pkl".format(generation)
            name_unif = "unif_gen_{}.pkl".format(generation)

        with open(os.path.join(self.exp_path, 'analyzed_data', name_cvg), 'wb') as f:
            pkl.dump(np.array(cvg), f)
        with open(os.path.join(self.exp_path, 'analyzed_data', name_unif), 'wb') as f:
            pkl.dump(np.array(unif), f)
        print("Done.")

    def save_trajectories(self, data, data_type='traj', generation=None):
        """
        Saves the trajectories in a single file
        :param data: trajectory dict
        :param data_type: name of type of data being saved
        :param generation: generation for which the trajs have been evaluated
        :return:
        """
        if generation is None:
            name = "archive_{}_all_gens.pkl".format(data_type)
        else:
            name = "archive_{}_gen_{}.pkl".format(data_type, generation)
        with open(os.path.join(self.exp_path, 'analyzed_data', name), 'wb') as f:
            pkl.dump(data, f)
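# A hedged usage sketch of EvalArchive: the experiment folder path is hypothetical
# and the choice of generation is only an example; the constructor and method
# signatures are the ones defined above.
if __name__ == '__main__':
    exp_path = '/path/to/experiment'  # hypothetical folder containing _params.json
    archive_eval = EvalArchive(exp_path, multip=False)

    # Re-evaluate the stored genomes to recover observation and info trajectories.
    obs_trajs, info_trajs = archive_eval.load_eval_archive(generation=None)

    # Compute coverage/uniformity for one generation and store everything to disk.
    some_gen = sorted(obs_trajs)[-1]  # e.g. the last evaluated generation
    archive_eval.get_metrics(obs_trajs, info_trajs, generation=some_gen)
    archive_eval.save_trajectories(obs_trajs, data_type='traj', generation=None)
    archive_eval.save_trajectories(info_trajs, data_type='info', generation=None)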