Example 1
    def __init__(self, traj,
                 optimizee_create_individual,
                 optimizee_fitness_weights,
                 parameters,
                 optimizee_bounding_func=None):
        super().__init__(traj, optimizee_create_individual=optimizee_create_individual,
                         optimizee_fitness_weights=optimizee_fitness_weights, parameters=parameters,
                         optimizee_bounding_func=optimizee_bounding_func)

        self.best_individual = None
        self.best_fitness = None

        sample_individual = self.optimizee_create_individual()

        # Generate parameter dictionary based on optimizee_param_grid
        self.param_list = {}
        _, optimizee_individual_param_spec = dict_to_list(sample_individual, get_dict_spec=True)
        self.optimizee_individual_dict_spec = optimizee_individual_param_spec

        optimizee_param_grid = parameters.param_grid
        # Assert validity of optimizee_param_grid
        assert set(sample_individual.keys()) == set(optimizee_param_grid.keys()), \
            "The Parameters of optimizee_param_grid don't match those of the optimizee individual"

        for param_name, param_type, param_length in optimizee_individual_param_spec:
            param_lower_bound, param_upper_bound, param_n_steps = optimizee_param_grid[param_name]
            if param_type == DictEntryType.Scalar:
                self.param_list[param_name] = np.linspace(param_lower_bound, param_upper_bound, param_n_steps + 1)
            elif param_type == DictEntryType.Sequence:
                curr_param_list = np.linspace(param_lower_bound, param_upper_bound, param_n_steps + 1)
                curr_param_list = np.meshgrid(*([curr_param_list] * param_length), indexing='ij')
                curr_param_list = [x.ravel() for x in curr_param_list]
                curr_param_list = np.stack(curr_param_list, axis=-1)
                self.param_list[param_name] = curr_param_list

        self.param_list = cartesian_product(self.param_list, tuple(sorted(optimizee_param_grid.keys())))
        self.size = len(self.param_list[list(self.param_list.keys())[0]])

        # Adding the bounds information to the trajectory
        traj.f_add_parameter_group('grid_spec')
        for param_name, param_grid_spec in optimizee_param_grid.items():
            traj.grid_spec.f_add_parameter(param_name + '.lower_bound', param_grid_spec[0])
            traj.grid_spec.f_add_parameter(param_name + '.upper_bound', param_grid_spec[1])
        traj.f_add_parameter('n_iteration', 1, comment='Grid search does only 1 iteration')
        #: The current generation number
        self.g = 0
        # Expanding the trajectory
        grouped_params_dict = {'individual.' + key: value for key, value in self.param_list.items()}
        final_params_dict = {'generation': [self.g],
                             'ind_idx': range(self.size)}
        final_params_dict.update(grouped_params_dict)
        traj.f_expand(cartesian_product(final_params_dict,
                                        [('ind_idx',) + tuple(grouped_params_dict.keys()), 'generation']))

        #: The population (i.e. list of individuals) to be evaluated at the next iteration
        self.eval_pop = None
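
For intuition, here is a minimal NumPy-only sketch of what the Scalar and Sequence branches above produce; the bounds, step count, and sequence length are made-up illustration values, not taken from the library:

import numpy as np

# Hypothetical grid spec in the same (lower_bound, upper_bound, n_steps) format
# as an entry of optimizee_param_grid above.
lower, upper, n_steps = 0.0, 1.0, 2

# Scalar entry: a 1-D axis of n_steps + 1 evenly spaced points.
scalar_axis = np.linspace(lower, upper, n_steps + 1)
print(scalar_axis)                    # [0.  0.5 1. ]

# Sequence entry of length 2: every combination of that axis with itself,
# flattened into rows of shape (param_length,).
param_length = 2
axis = np.linspace(lower, upper, n_steps + 1)
mesh = np.meshgrid(*([axis] * param_length), indexing='ij')
rows = np.stack([m.ravel() for m in mesh], axis=-1)
print(rows.shape)                     # (9, 2): (n_steps + 1) ** param_length rows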
Example 2
    def _expand_trajectory(self, traj):
        """
        Add as many explored runs as there are individuals to be evaluated, and add the individuals
        themselves as explored parameters.

        :param ~l2l.utils.trajectory.Trajectory traj: The trajectory that contains the parameters and the
            individual that we want to simulate. The individual is accessible via `traj.individual`, and a
            parameter such as param1 is accessible via `traj.param1`.

        :return:
        """

        grouped_params_dict = get_grouped_dict(self.eval_pop)
        grouped_params_dict = {
            'individual.' + key: val
            for key, val in grouped_params_dict.items()
        }

        final_params_dict = {
            'generation': [self.g],
            'ind_idx': range(len(self.eval_pop))
        }
        final_params_dict.update(grouped_params_dict)

        # We need to convert them to lists or write our own custom IndividualParameter ;-)
        # Note the second argument to `cartesian_product`: This is for only having the cartesian product
        # between ``generation x (ind_idx AND individual)``, so that every individual has just one
        # unique index within a generation.
        traj.f_expand(
            cartesian_product(final_params_dict, [
                ('ind_idx', ) + tuple(grouped_params_dict.keys()), 'generation'
            ]))
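
The comment on the second argument to `cartesian_product` can be illustrated with a small stand-alone sketch. This is an assumption about the grouping semantics (names inside a tuple are zipped together, and the cartesian product is taken across the groups), not the library's implementation:

from itertools import product

# Toy input mirroring final_params_dict above.
final_params = {
    'generation': [0],
    'ind_idx': [0, 1, 2],
    'individual.x': [0.1, 0.2, 0.3],
}
# Same grouping as the second argument to cartesian_product:
# zip everything inside the tuple, then cross the groups.
groups = [('ind_idx', 'individual.x'), 'generation']

zipped = [list(zip(*(final_params[name] for name in g))) if isinstance(g, tuple)
          else [(value,) for value in final_params[g]]
          for g in groups]
rows = [sum(combo, ()) for combo in product(*zipped)]
print(rows)  # [(0, 0.1, 0), (1, 0.2, 0), (2, 0.3, 0)]: one unique ind_idx per generation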
Example 3
    def __init__(self,
                 traj,
                 optimizee_create_individual,
                 optimizee_fitness_weights,
                 parameters,
                 optimizee_bounding_func=None):
        super().__init__(
            traj,
            optimizee_create_individual=optimizee_create_individual,
            optimizee_fitness_weights=optimizee_fitness_weights,
            parameters=parameters,
            optimizee_bounding_func=optimizee_bounding_func)

        self.best_individual = None
        self.best_fitness = None

        sample_individual = self.optimizee_create_individual()

        # Generate parameter dictionary based on optimizee_param_grid
        self.param_list = {}
        _, optimizee_individual_param_spec = dict_to_list(sample_individual,
                                                          get_dict_spec=True)
        self.optimizee_individual_dict_spec = optimizee_individual_param_spec

        optimizee_param_grid = parameters.param_grid
        # Assert validity of optimizee_param_grid
        assert set(sample_individual.keys()) == set(optimizee_param_grid.keys()), \
            "The Parameters of optimizee_param_grid don't match those of the optimizee individual"

        for param_name, param_type, param_length in optimizee_individual_param_spec:
            param_lower_bound, param_upper_bound, param_n_steps = optimizee_param_grid[
                param_name]
            if param_type == DictEntryType.Scalar:
                self.param_list[param_name] = np.linspace(
                    param_lower_bound, param_upper_bound, param_n_steps)
            elif param_type == DictEntryType.Sequence:
                curr_param_list = np.linspace(param_lower_bound,
                                              param_upper_bound, param_n_steps)
                curr_param_list = np.meshgrid(*([curr_param_list] *
                                                param_length),
                                              indexing='ij')
                curr_param_list = [x.ravel() for x in curr_param_list]
                curr_param_list = np.stack(curr_param_list, axis=-1)
                self.param_list[param_name] = curr_param_list

        self.param_list = cartesian_product(
            self.param_list, tuple(sorted(optimizee_param_grid.keys())))

        # Adding the bounds information to the trajectory
        traj.f_add_parameter_group('grid_spec')
        for param_name, param_grid_spec in optimizee_param_grid.items():
            traj.f_add_parameter_to_group('grid_spec',
                                          param_name + '.lower_bound',
                                          param_grid_spec[0])
            traj.f_add_parameter_to_group('grid_spec',
                                          param_name + '.upper_bound',
                                          param_grid_spec[1])
            traj.f_add_parameter_to_group('grid_spec', param_name + '.step',
                                          param_grid_spec[2])

        # Expanding the trajectory
        self.param_list = {('individual.' + key): value
                           for key, value in self.param_list.items()}
        k0 = list(self.param_list.keys())[0]
        self.param_list['generation'] = [0]
        self.param_list['ind_idx'] = np.arange(len(self.param_list[k0]))

        traj.f_expand(self.param_list)
        traj.par['n_iteration'] = 1
        #: The current generation number
        self.g = 0
        #: The population (i.e. list of individuals) to be evaluated at the next iteration
        self.eval_pop = None  # self.param_list
        # Storing the fitness of the current individual
        self.current_fitness = -np.inf
        self.traj = traj
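
To pass the key-matching assertion in these constructors, `parameters.param_grid` must use exactly the keys of the dictionaries returned by `optimizee_create_individual`, each mapped to a `(lower_bound, upper_bound, n_steps)` tuple. A hypothetical sketch follows; the optimizee and its parameter names are invented for illustration:

import numpy as np

# Hypothetical optimizee: each individual is a dict with a scalar 'sigma'
# and a length-2 sequence 'weights'.
def create_individual():
    return {'sigma': np.random.uniform(0.0, 1.0),
            'weights': np.random.uniform(-1.0, 1.0, size=2)}

# The grid spec must use exactly the same keys, each mapped to
# (lower_bound, upper_bound, n_steps).
param_grid = {
    'sigma': (0.0, 1.0, 10),
    'weights': (-1.0, 1.0, 4),
}

sample = create_individual()
assert set(sample.keys()) == set(param_grid.keys())  # mirrors the check in __init__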