Example #1
0
    def generate_parameters(self, parameter_id, **kwargs):
        """
        Hand out the next configuration from the current population.

        When every member of the population has already been dispatched,
        record a credit (and the requesting parameter id) so the
        configuration can be sent later, and signal that no trial is
        available right now.

        Parameters
        ----------
        parameter_id : int
            Unique identifier for the requested hyper-parameters.
            This will later be used in :meth:`receive_trial_result`.
        **kwargs
            Not used

        Returns
        -------
        dict
            One newly generated configuration

        """
        population_exhausted = self.pos == self.population_size - 1
        if population_exhausted:
            logger.debug('Credit added by one in parameters request')
            self.credit += 1
            self.param_ids.append(parameter_id)
            raise nni.NoMoreTrialError('No more parameters now.')
        self.pos += 1
        next_trial = self.population[self.pos]
        next_trial.parameter_id = parameter_id
        self.running[parameter_id] = next_trial
        logger.info('Generate parameter : %s', next_trial.hyper_parameters)
        return split_index(next_trial.hyper_parameters)
Example #2
0
    def update_search_space(self, search_space):
        """
        Store the new search space and (re-)build the initial population.

        Every individual gets randomly drawn hyper-parameters plus
        per-individual checkpoint directories for the current epoch.

        Parameters
        ----------
        search_space : dict
            Search space
        """
        logger.info('Update search space %s', search_space)
        self.searchspace_json = search_space
        self.space = json2space(self.searchspace_json)

        self.random_state = np.random.RandomState()
        # every dimension is sampled randomly for the initial population
        is_rand = {key: True for key in self.space}

        self.population = []
        for idx in range(self.population_size):
            params = json2parameter(
                self.searchspace_json, is_rand, self.random_state)
            params = split_index(params)
            ckpt_dir = os.path.join(self.all_checkpoint_dir, str(idx))
            epoch_dir = os.path.join(ckpt_dir, str(self.epoch))
            # at epoch 0 a trial loads from and saves to the same directory
            params['load_checkpoint_dir'] = epoch_dir
            params['save_checkpoint_dir'] = epoch_dir
            self.population.append(
                TrialInfo(checkpoint_dir=ckpt_dir, hyper_parameters=params))
Example #3
0
    def _generate_individual(self, parameter_id):
        """
        Produce the configuration for one trial.

        While unevaluated individuals remain (first generation), one of
        them is dispatched directly.  Otherwise the population is
        shuffled, the first two individuals are compared, the worse one
        is dropped, and the better one is mutated at a single randomly
        chosen position to create the new individual.

        Parameters
        ----------

        parameter_id : int

        Returns
        -------
        dict
            A group of candidate parameters that evolution tuner generated.
        """
        unevaluated = next(
            (idx for idx, member in enumerate(self.population)
             if member.result is None),
            -1)

        if unevaluated != -1:
            indiv = copy.deepcopy(self.population[unevaluated])
            self.population.pop(unevaluated)
        else:
            random.shuffle(self.population)
            # avoid only 1 individual has result
            has_pair = len(self.population) > 1
            if has_pair and \
                    self.population[0].result < self.population[1].result:
                self.population[0] = self.population[1]

            # mutation on the worse individual
            space = json2space(self.searchspace_json,
                               self.population[0].config)
            mutation_pos = space[random.randint(0, len(space) - 1)]
            is_rand = {name: name == mutation_pos for name in self.space}
            config = json2parameter(self.searchspace_json, is_rand,
                                    self.random_state,
                                    self.population[0].config)

            if len(self.population) > 1:
                self.population.pop(1)

            indiv = Individual(config=config)

        # remove "_index" from config and save params-id
        self.running_trials[parameter_id] = indiv
        return split_index(indiv.config)
Example #4
0
 def test_split_index_nested(self):
     """test for nested search space
     """
     with_index = {
         "layer0": {"_name": "Avg_pool",
                    "pooling_size": {"_index": 1, "_value": 2}},
         "layer1": {"_name": "Empty"},
         "layer2": {"_name": "Max_pool",
                    "pooling_size": {"_index": 2, "_value": 3}},
         "layer3": {"_name": "Conv",
                    "kernel_size": {"_index": 3, "_value": 5},
                    "output_filters": {"_index": 3, "_value": 64}},
     }
     expected = {
         "layer0": {"_name": "Avg_pool", "pooling_size": 2},
         "layer1": {"_name": "Empty"},
         "layer2": {"_name": "Max_pool", "pooling_size": 3},
         "layer3": {"_name": "Conv", "kernel_size": 5,
                    "output_filters": 64},
     }
     self.assertEqual(split_index(with_index), expected)
Example #5
0
    def generate_parameters(self, parameter_id, **kwargs):
        """
        Return a dict of trial (hyper-)parameters, as a serializable object.

        While unevaluated individuals remain, one of them is dispatched
        directly.  Otherwise two randomly chosen individuals are compared,
        the worse one is discarded, and the better one is mutated at a
        single randomly selected position to produce the new candidate.

        Parameters
        ----------
        parameter_id : int

        Returns
        -------
        dict
            A group of candidate parameters that evolution tuner generated.

        Raises
        ------
        RuntimeError
            If the population has not been initialized yet.
        """
        if not self.population:
            raise RuntimeError('The population is empty')

        pos = -1

        # find the first individual that has not been evaluated yet
        for i in range(len(self.population)):
            if self.population[i].result is None:
                pos = i
                break

        if pos != -1:
            indiv = copy.deepcopy(self.population[pos])
            self.population.pop(pos)
            total_config = indiv.config
        else:
            random.shuffle(self.population)
            # guard the pairwise comparison: with a population of size 1,
            # self.population[1] would raise IndexError (same guard as in
            # _generate_individual)
            if len(self.population) > 1 and \
                    self.population[0].result < self.population[1].result:
                self.population[0] = self.population[1]

            # mutation: resample exactly one randomly chosen position of
            # the surviving individual's config
            space = json2space(self.searchspace_json,
                               self.population[0].config)
            is_rand = dict()
            mutation_pos = space[random.randint(0, len(space) - 1)]

            for i in range(len(self.space)):
                is_rand[self.space[i]] = (self.space[i] == mutation_pos)
            config = json2parameter(self.searchspace_json, is_rand,
                                    self.random_state,
                                    self.population[0].config)
            if len(self.population) > 1:
                self.population.pop(1)
            # remove "_index" from config and save params-id

            total_config = config

        self.total_data[parameter_id] = total_config
        config = split_index(total_config)

        return config
Example #6
0
    def generate_parameters(self, parameter_id, **kwargs):
        """
        Return one set of trial hyper-parameters suggested by hyperopt.

        Hyperopt has no parallel mode, so when the fresh suggestion
        duplicates a configuration already handed to a running trial, a
        random suggestion is drawn instead (duplicates may still occur
        rarely).  In parallel mode the parameter id is tracked as running.
        """
        suggestion = self._get_suggestion(random_search=False)
        # re-draw randomly when a concurrent trial already got this config;
        # hyperopt itself cannot de-duplicate across parallel trials
        if suggestion in self.total_data.values():
            suggestion = self._get_suggestion(random_search=True)
        self.total_data[parameter_id] = suggestion

        if self.parallel:
            self.running_data.append(parameter_id)

        return split_index(suggestion)
Example #7
0
    def test_split_index_normal(self):
        """test for normal search space
        """
        params_with_index = {
            "dropout_rate": {"_index": 1, "_value": 0.9},
            "hidden_size": {"_index": 1, "_value": 512},
        }
        expected = {"dropout_rate": 0.9, "hidden_size": 512}

        self.assertEqual(split_index(params_with_index), expected)
Example #8
0
    def generate_parameters(self, parameter_id, **kwargs):
        """
        Returns a set of trial (hyper-)parameters, as a serializable object.

        Parameters
        ----------
        parameter_id : int

        Returns
        -------
        params : dict
        """
        suggestion = self.get_suggestion(random_search=False)
        # hyperopt has no parallel mode, so re-draw randomly when a running
        # trial already received this configuration; duplicates may still
        # slip through occasionally
        if suggestion in self.total_data.values():
            suggestion = self.get_suggestion(random_search=True)
        self.total_data[parameter_id] = suggestion
        return split_index(suggestion)
Example #9
0
    def receive_trial_result(self, parameter_id, parameters, value, **kwargs):
        """
        Receive trial's result. if the number of finished trials equals ``self.population_size``, start the next epoch to
        train the model.

        Parameters
        ----------
        parameter_id : int
            Unique identifier of used hyper-parameters, same with :meth:`generate_parameters`.
        parameters : dict
            Hyper-parameters generated by :meth:`generate_parameters`.
        value : dict
            Result from trial (the return value of :func:`nni.report_final_result`).
        """
        logger.info('Get one trial result, id = %d, value = %s', parameter_id,
                    value)
        value = extract_scalar_reward(value)
        # scores are compared as "higher is better"; negate for minimization
        if self.optimize_mode == OptimizeMode.Minimize:
            value = -value
        # NOTE(review): pop(..., None) returns None for an unknown id, which
        # would make the next line raise AttributeError — confirm whether
        # unknown parameter ids can reach this point.
        trial_info = self.running.pop(parameter_id, None)
        trial_info.score = value
        self.finished.append(trial_info)
        self.finished_trials += 1
        if self.finished_trials == self.population_size:
            # whole population evaluated: advance to the next epoch and
            # rebuild the population from the finished trials
            logger.info('Proceeding to next epoch')
            self.epoch += 1
            self.population = []
            self.pos = -1
            self.running = {}
            #exploit and explore: sort by score (best first), then overwrite
            # the bottom `cutoff` trials with perturbed copies of top trials
            self.finished = sorted(self.finished,
                                   key=lambda x: x.score,
                                   reverse=True)
            cutoff = int(np.ceil(self.fraction * len(self.finished)))
            tops = self.finished[:cutoff]
            bottoms = self.finished[self.finished_trials - cutoff:]
            for bottom in bottoms:
                top = np.random.choice(tops)
                exploit_and_explore(bottom, top, self.factor,
                                    self.resample_probability, self.epoch,
                                    self.searchspace_json)
            for trial in self.finished:
                if trial not in bottoms:
                    # surviving trials keep training: next epoch loads from
                    # the checkpoint the previous epoch saved
                    trial.clean_id()
                    trial.hyper_parameters[
                        'load_checkpoint_dir'] = trial.hyper_parameters[
                            'save_checkpoint_dir']
                    trial.hyper_parameters[
                        'save_checkpoint_dir'] = os.path.join(
                            trial.checkpoint_dir, str(self.epoch))
            self.finished_trials = 0
            # move all finished trials into the fresh population
            for _ in range(self.population_size):
                trial_info = self.finished.pop()
                self.population.append(trial_info)
            # drain credits accumulated while no configuration was available
            # (see generate_parameters), dispatching queued parameter ids
            while self.credit > 0 and self.pos + 1 < len(self.population):
                self.credit -= 1
                self.pos += 1
                parameter_id = self.param_ids.pop()
                trial_info = self.population[self.pos]
                trial_info.parameter_id = parameter_id
                self.running[parameter_id] = trial_info
                self.send_trial_callback(
                    parameter_id, split_index(trial_info.hyper_parameters))