コード例 #1
0
    def generate_parameters(self, parameter_id, **kwargs):
        """
        Return a dict of trial (hyper-)parameters.

        If no trial configuration is available right now, ``self.credit`` is
        incremented by one so the configuration can be sent later.

        Parameters
        ----------
        parameter_id : int
            Unique identifier of the requested hyper-parameters.
        **kwargs
            Not used

        Returns
        -------
        dict
            One newly generated configuration.

        Raises
        ------
        RuntimeError
            If the population has not been initialized.
        nni.NoMoreTrialError
            If too many trials are already running; the request is queued in
            ``self.param_ids`` and served later.
        """
        if not self.population:
            raise RuntimeError('The population is empty')

        if self.num_running_trials >= self.population_size:
            # Defer this request: remember the parameter id and serve it
            # once a running trial finishes.
            logger.warning(
                "No enough trial config, population_size is suggested to be larger than trialConcurrency"
            )
            self.credit += 1
            self.param_ids.append(parameter_id)
            raise nni.NoMoreTrialError('no more parameters now.')

        return self._generate_individual(parameter_id)
コード例 #2
0
ファイル: pbt_tuner.py プロジェクト: microsoft/nni
    def generate_parameters(self, parameter_id, **kwargs):
        """
        Generate parameters.  If no trial configuration is available right now,
        ``self.credit`` is incremented by one so the config can be sent later.

        Parameters
        ----------
        parameter_id : int
            Unique identifier for requested hyper-parameters.
            This will later be used in :meth:`receive_trial_result`.
        **kwargs
            Not used

        Returns
        -------
        dict
            One newly generated configuration

        Raises
        ------
        nni.NoMoreTrialError
            When every member of the current population has been handed out;
            the request is queued in ``self.param_ids`` and served later.
        """
        if self.pos == self.population_size - 1:
            # Population exhausted for this round: defer the request.
            logger.debug('Credit added by one in parameters request')
            self.credit += 1
            self.param_ids.append(parameter_id)
            raise nni.NoMoreTrialError('No more parameters now.')
        self.pos += 1
        trial_info = self.population[self.pos]
        trial_info.parameter_id = parameter_id
        self.running[parameter_id] = trial_info
        logger.info('Generate parameter : %s', trial_info.hyper_parameters)
        return trial_info.hyper_parameters
コード例 #3
0
    def generate_parameters(self, parameter_id, **kwargs):
        """
        Generate parameters for one trial.

        Parameters
        ----------
        parameter_id : int
            The id for the generated hyperparameter
        **kwargs
            Not used

        Returns
        -------
        dict
            One configuration from the expanded search space.

        Raises
        ------
        NoMoreTrialError
            If all the configurations have been sent, raise :class:`~nni.NoMoreTrialError`.
        """
        self.count += 1
        while self.count <= len(self.expanded_search_space) - 1:
            # Skip configurations already recorded in self.supplement_data.
            # Deep-copy before converting so the search-space entry itself
            # is never mutated.
            _params_tuple = convert_dict2tuple(
                copy.deepcopy(self.expanded_search_space[self.count]))
            if _params_tuple in self.supplement_data:
                self.count += 1
            else:
                return self.expanded_search_space[self.count]
        raise nni.NoMoreTrialError('no more parameters now.')
コード例 #4
0
ファイル: ppo_tuner.py プロジェクト: zyzzu/nni
    def generate_parameters(self, parameter_id, **kwargs):
        """
        Generate parameters.  If no trial configuration is available right now,
        ``self.credit`` is incremented by one so the config can be sent later.

        Parameters
        ----------
        parameter_id : int
            Unique identifier for requested hyper-parameters.
            This will later be used in :meth:`receive_trial_result`.
        **kwargs
            Not used

        Returns
        -------
        dict
            One newly generated configuration

        Raises
        ------
        nni.NoMoreTrialError
            When the current inference batch is exhausted; the request is
            queued in ``self.param_ids`` and served after the next batch.
        """
        if self.first_inf:
            # Lazily run one batch of model inference on the first request.
            self.trials_result = [None for _ in range(self.inf_batch_size)]
            mb_obs, mb_actions, mb_values, mb_neglogpacs, mb_dones, last_values = self.model.inference(self.inf_batch_size)
            self.trials_info = TrialsInfo(mb_obs, mb_actions, mb_values, mb_neglogpacs,
                                          mb_dones, last_values, self.inf_batch_size)
            self.first_inf = False

        trial_info_idx, actions = self.trials_info.get_next()
        if trial_info_idx is None:
            # Batch exhausted: defer the request until results come back.
            logger.debug('Credit added by one in parameters request')
            self.credit += 1
            self.param_ids.append(parameter_id)
            raise nni.NoMoreTrialError('no more parameters now.')

        self.running_trials[parameter_id] = trial_info_idx
        new_config = self._actions_to_config(actions)
        return new_config
コード例 #5
0
ファイル: gridsearch_tuner.py プロジェクト: wangdaiyin/nni
 def generate_parameters(self, parameter_id):
     """Return the next unused configuration from the expanded search space.

     Entries already present in ``self.supplement_data`` are skipped.
     Raises ``nni.NoMoreTrialError`` once the space is exhausted.
     """
     self.count += 1
     while self.count < len(self.expanded_search_space):
         candidate = self.expanded_search_space[self.count]
         if convert_dict2tuple(candidate) not in self.supplement_data:
             return candidate
         self.count += 1
     raise nni.NoMoreTrialError('no more parameters now.')
コード例 #6
0
ファイル: opevo.py プロジェクト: toydogcat/nni
    def generate_parameters(self, parameter_id, **kwargs):
        """Method which provides one set of hyper-parameters.

        Override of the abstract method in :class:`~nni.tuner.Tuner`.
        Pops an individual from the serve list when one is available;
        otherwise queues the request and raises ``nni.NoMoreTrialError``.
        """
        if not self.serve_list:
            # Nothing to serve yet: remember the request for later.
            self.request_list.append(parameter_id)
            raise nni.NoMoreTrialError('no more parameters now.')
        individual = self.serve_list.pop()
        self.wait_dict[parameter_id] = individual
        return individual.pick_out()
コード例 #7
0
    def generate_parameters(self, parameter_id):
        """Return the next trial (hyper-)parameters as a serializable object.

        Parameters
        ----------
        parameter_id : int
            Identifier of the requested configuration (not used here).

        Raises
        ------
        nni.NoMoreTrialError
            When every stored value has already been handed out.
        """
        self.count += 1
        if self.count < len(self.values):
            return self.values[self.count]
        raise nni.NoMoreTrialError('no more parameters now.')
コード例 #8
0
    def generate_parameters(self, *args, **kwargs):
        """Return the next parameter set that has not been tried before.

        Repeatedly asks the underlying algorithm for a suggestion and skips
        any duplicate already recorded in ``self.history``.

        Raises
        ------
        nni.NoMoreTrialError
            Once the search space is fully explored.
        """
        while True:
            suggestion = self._suggest()
            if suggestion is None:
                raise nni.NoMoreTrialError('Search space fully explored')
            candidate = deformat_parameters(suggestion, self.space)

            # Canonical serialized form is the dedup key.
            key = nni.dump(candidate, sort_keys=True)
            if key in self.history:
                continue
            self.history.add(key)
            return candidate
コード例 #9
0
ファイル: batch_tuner.py プロジェクト: JSong-Jia/nni-1
    def generate_parameters(self, parameter_id, **kwargs):
        """Return a dict of trial (hyper-)parameters, as a serializable object.

        Parameters
        ----------
        parameter_id : int
            Identifier of the requested configuration (not used here).

        Returns
        -------
        dict
            A candidate parameter group.

        Raises
        ------
        nni.NoMoreTrialError
            When all stored candidates have been handed out.
        """
        self._count += 1
        if self._count < len(self._values):
            return self._values[self._count]
        raise nni.NoMoreTrialError('no more parameters now.')
コード例 #10
0
ファイル: dedup.py プロジェクト: yinfupai/nni
    def __call__(
            self,
            formatted_parameters: FormattedParameters) -> FormattedParameters:
        """Return ``formatted_parameters`` unless it is a duplicate.

        On the first duplicate seen, a grid-search fallback is initialized
        and then used to produce fresh, not-yet-seen parameters.

        Raises
        ------
        nni.NoMoreTrialError
            If the grid-search fallback is also exhausted.
        """
        if self._never_dup or self._not_dup(formatted_parameters):
            return formatted_parameters

        if self._grid_search is None:
            # Lazy %-style args avoid formatting the parameters when this
            # log level is filtered out.
            _logger.info(
                'Tuning algorithm generated duplicate parameter: %s',
                formatted_parameters)
            _logger.info('Use grid search for deduplication.')
            self._init_grid_search()

        while True:
            new = self._grid_search._suggest()  # type: ignore
            if new is None:
                raise nni.NoMoreTrialError()
            if self._not_dup(new):
                return new
コード例 #11
0
ファイル: ppo_tuner.py プロジェクト: chingloong/nni
    def generate_parameters(self, parameter_id, **kwargs):
        """
        Generate parameters.  If no trial configuration is available right now,
        ``self.credit`` is incremented by one so the config can be sent later.

        Parameters
        ----------
        parameter_id : int
            Unique identifier for requested hyper-parameters.
        **kwargs
            Not used

        Returns
        -------
        dict
            One newly generated configuration

        Raises
        ------
        nni.NoMoreTrialError
            When the current inference batch is exhausted; the request is
            queued in ``self.param_ids`` and served after the next batch.
        """
        if self.first_inf:
            # Lazily run one batch of model inference on the first request.
            self.trials_result = [None for _ in range(self.inf_batch_size)]
            mb_obs, mb_actions, mb_values, mb_neglogpacs, mb_dones, last_values = self.model.inference(self.inf_batch_size)
            self.trials_info = TrialsInfo(mb_obs, mb_actions, mb_values, mb_neglogpacs,
                                          mb_dones, last_values, self.inf_batch_size)
            self.first_inf = False

        trial_info_idx, actions = self.trials_info.get_next()
        if trial_info_idx is None:
            # Batch exhausted: defer the request until results come back.
            self.credit += 1
            self.param_ids.append(parameter_id)
            raise nni.NoMoreTrialError('no more parameters now.')

        self.running_trials[parameter_id] = trial_info_idx
        new_config = self._actions_to_config(actions)
        return new_config
コード例 #12
0
    def generate_parameters(self, parameter_id, trial_job_id=None, pos=None):
        """
        Return one child-architecture configuration decoded from ``self.child_arc``.

        Parameters
        ----------
        parameter_id : int
            Unique identifier for the requested configuration.
        trial_job_id
            Not used in this method.
        pos : int, optional
            Position into ``self.child_arc``.  When ``None``, the next
            position is popped from ``self.bucket``.

        Returns
        -------
        dict
            Nested configuration: mutable block -> layer name ->
            ``chosen_layer`` / ``chosen_inputs``.

        Raises
        ------
        nni.NoMoreTrialError
            When the bucket is empty and ``num_completed_jobs`` has not yet
            reached ``total_steps``.
        """
        if pos is None:
            if not self.bucket:
                # NOTE(review): a fresh set of positions appears to be
                # generated only once all jobs of the previous round have
                # completed — confirm against generate_one_epoch_parameters.
                if self.num_completed_jobs < self.total_steps:
                    raise nni.NoMoreTrialError('no more parameters now.')
                else:
                    self.generate_one_epoch_parameters()
            pos = self.bucket.pop()
        logger.info('current bucket: %s', self.bucket)
        logger.info('current pos: %s', pos)
        self.parameter_id2pos[parameter_id] = pos
        current_arc_code = self.child_arc[pos]
        start_idx = 0
        current_config = dict()

        def onehot2list(l):
            # Indices of the entries equal to 1 in a one-/multi-hot vector.
            return [idx for idx, val in enumerate(l) if val == 1]

        for layer_id, (layer_name, info) in enumerate(self.search_space):
            mutable_block = info['mutable_block']
            if mutable_block not in current_config:
                current_config[mutable_block] = dict()
            # First entry of this layer's segment selects the layer type.
            layer_choice_idx = current_arc_code[start_idx]
            if layer_id != 0:
                input_start = start_idx + 1
            else:
                input_start = start_idx
            # The following `layer_id` entries encode (multi-hot) which of the
            # previous layers feed into this one.
            inputs_idxs = current_arc_code[input_start:input_start + layer_id]
            inputs_idxs = onehot2list(inputs_idxs)
            current_config[mutable_block][layer_name] = dict()
            current_config[mutable_block][layer_name]['chosen_layer'] = info[
                'layer_choice'][layer_choice_idx]
            current_config[mutable_block][layer_name]['chosen_inputs'] = [
                info['optional_inputs'][ipi] for ipi in inputs_idxs
            ]
            # Each layer consumes one choice entry plus `layer_id` input flags.
            start_idx += 1 + layer_id

        return current_config
コード例 #13
0
ファイル: smac_tuner.py プロジェクト: yinfupai/nni
    def generate_parameters(self, parameter_id, **kwargs):
        """
        Generate one instance of hyperparameters (i.e., one configuration).
        Get one from SMAC3's ``challengers``.

        Parameters
        ----------
        parameter_id : int
            Unique identifier for requested hyper-parameters. This will later be used in :meth:`receive_trial_result`.
        **kwargs
            Not used

        Returns
        -------
        dict
            One newly generated configuration
        """
        if self.first_one:
            # The very first configuration comes from SMAC's start-up routine.
            init_challenger = self.smbo_solver.nni_smac_start()
            self.total_data[parameter_id] = init_challenger
            return self.param_postprocess(init_challenger.get_dictionary())

        challengers = self.smbo_solver.nni_smac_request_challengers()
        got_any = False
        for candidate in challengers:
            got_any = True
            if self.dedup:
                # Skip candidates identical to a configuration already sent.
                candidate_dict = candidate.get_dictionary()
                if any(seen.get_dictionary() == candidate_dict
                       for seen in self.total_data.values()):
                    continue
            self.total_data[parameter_id] = candidate
            return self.param_postprocess(candidate.get_dictionary())
        assert got_any, 'The case that challengers is empty is not handled.'
        self.logger.info('In generate_parameters: No more new parameters.')
        raise nni.NoMoreTrialError('No more new parameters.')
コード例 #14
0
ファイル: gridsearch_tuner.py プロジェクト: zwt233/nni
 def generate_parameters(self, parameter_id):
     """Return the next configuration from the expanded search space.

     Raises ``nni.NoMoreTrialError`` once every configuration has been served.
     """
     self.count += 1
     if self.count < len(self.expanded_search_space):
         return self.expanded_search_space[self.count]
     raise nni.NoMoreTrialError('no more parameters now.')
コード例 #15
0
ファイル: batch_tuner.py プロジェクト: zenghanfu/nni
 def generate_parameters(self, parameter_id):
     """Return the next stored parameter set.

     Raises ``nni.NoMoreTrialError`` once every value has been served.
     """
     self.count += 1
     if self.count < len(self.values):
         return self.values[self.count]
     raise nni.NoMoreTrialError('no more parameters now.')