Example #1
    def send_trial(self, parameters, placement_constraint=None):
        """
        Send parameters to NNI.

        Parameters
        ----------
        parameters : Any
            Any payload.

        Returns
        -------
        int
            Parameter ID that is assigned to this parameter,
            which will be used for identification in the future.
        """
        self.parameters_count += 1
        if placement_constraint is None:
            placement_constraint = {'type': 'None', 'gpus': []}
        self._validate_placement_constraint(placement_constraint)
        new_trial = {
            'parameter_id': self.parameters_count,
            'parameters': parameters,
            'parameter_source': 'algorithm',
            'placement_constraint': placement_constraint
        }
        _logger.debug('New trial sent: %s', new_trial)
        send(CommandType.NewTrialJob, nni.dump(new_trial))
        if self.send_trial_callback is not None:
            self.send_trial_callback(parameters)  # pylint: disable=not-callable
        return self.parameters_count
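A minimal usage sketch for the method above; the `advisor` variable and the payload are hypothetical, and only the default constraint shape {'type': 'None', 'gpus': []} is taken from the code itself.

# Hypothetical caller; `advisor` is an instance of the class that defines send_trial above.
params = {'learning_rate': 0.01, 'batch_size': 32}   # any serializable payload

# Without a constraint the method falls back to {'type': 'None', 'gpus': []}.
first_id = advisor.send_trial(params)

# Passing the same two-key dict explicitly; which 'type' values are accepted is decided
# by _validate_placement_constraint, which is not shown in this listing.
second_id = advisor.send_trial(params, placement_constraint={'type': 'None', 'gpus': []})

assert second_id == first_id + 1   # parameter IDs are a simple running counter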
Example #2
File: bohb_advisor.py Project: maxpark/nni
    def handle_initialize(self, data):
        """Initialize Tuner, including creating Bayesian optimization-based parametric models
        and search space formations

        Parameters
        ----------
        data : dict
            search space of this experiment

        Raises
        ------
        ValueError
            Error: Search space is None
        """
        logger.info('start to handle_initialize')
        # convert search space JSON to ConfigSpace
        self.handle_update_search_space(data)

        # generate BOHB config_generator using Bayesian optimization
        if self.search_space:
            self.cg = CG_BOHB(configspace=self.search_space,
                              min_points_in_model=self.min_points_in_model,
                              top_n_percent=self.top_n_percent,
                              num_samples=self.num_samples,
                              random_fraction=self.random_fraction,
                              bandwidth_factor=self.bandwidth_factor,
                              min_bandwidth=self.min_bandwidth)
        else:
            raise ValueError('Error: Search space is None')
        # generate first brackets
        self.generate_new_bracket()
        send(CommandType.Initialized, '')
Example #3
    def send_trial(self, parameters):
        """
        Send parameters to NNI.

        Parameters
        ----------
        parameters : Any
            Any payload.

        Returns
        -------
        int
            Parameter ID that is assigned to this parameter,
            which will be used for identification in the future.
        """
        self.parameters_count += 1
        new_trial = {
            'parameter_id': self.parameters_count,
            'parameters': parameters,
            'parameter_source': 'algorithm'
        }
        _logger.debug('New trial sent: %s', new_trial)
        send(CommandType.NewTrialJob, json_dumps(new_trial))
        if self.send_trial_callback is not None:
            self.send_trial_callback(parameters)  # pylint: disable=not-callable
        return self.parameters_count
Example #4
File: bohb_advisor.py Project: maxpark/nni
    def _get_one_trial_job(self):
        """get one trial job, i.e., one hyperparameter configuration.

        If this function is called, a command will be sent by BOHB:
        a. If there is a parameter waiting to run, it will send "NewTrialJob" with a dict:
        {
            'parameter_id': id of new hyperparameter
            'parameter_source': 'algorithm'
            'parameters': value of new hyperparameter
        }
        b. If BOHB has no parameters waiting, it will send "NoMoreTrialJobs" with
        {
            'parameter_id': '-1_0_0',
            'parameter_source': 'algorithm',
            'parameters': ''
        }
        """
        if not self.generated_hyper_configs:
            ret = {
                'parameter_id': '-1_0_0',
                'parameter_source': 'algorithm',
                'parameters': ''
            }
            send(CommandType.NoMoreTrialJobs, nni.dump(ret))
            return None
        assert self.generated_hyper_configs
        params = self.generated_hyper_configs.pop(0)
        ret = {
            'parameter_id': params[0],
            'parameter_source': 'algorithm',
            'parameters': params[1]
        }
        self.parameters[params[0]] = params[1]
        return ret
Example #5
    def handle_initialize(self, data):
        """Callback for initializing the advisor.

        Parameters
        ----------
        data: dict
            search space
        """
        self.handle_update_search_space(data)
        send(CommandType.Initialized, '')
Example #6
    def handle_request_trial_jobs(self, data):
        """
        Parameters
        ----------
        data: int
            number of trial jobs
        """
        for _ in range(data):
            ret = self._get_one_trial_job()
            send(CommandType.NewTrialJob, json_tricks.dumps(ret))
Example #7
    def handle_report_metric_data(self, data):
        """
        Parameters
        ----------
        data:
            it is an object which has keys 'parameter_id', 'value', 'trial_job_id', 'type', 'sequence'.

        Raises
        ------
        ValueError
            Data type not supported
        """
        if 'value' in data:
            data['value'] = nni.load(data['value'])
        # multiphase? need to check
        if data['type'] == MetricType.REQUEST_PARAMETER:
            assert multi_phase_enabled()
            assert data['trial_job_id'] is not None
            assert data['parameter_index'] is not None
            assert data['trial_job_id'] in self.job_id_para_id_map
            self._handle_trial_end(
                self.job_id_para_id_map[data['trial_job_id']])
            ret = self._get_one_trial_job()
            if data['trial_job_id'] is not None:
                ret['trial_job_id'] = data['trial_job_id']
            if data['parameter_index'] is not None:
                ret['parameter_index'] = data['parameter_index']
            self.job_id_para_id_map[data['trial_job_id']] = ret['parameter_id']
            send(CommandType.SendTrialJobParameter, nni.dump(ret))
        else:
            value = extract_scalar_reward(data['value'])
            bracket_id, i, _ = data['parameter_id'].split('_')

            # add <trial_job_id, parameter_id> to self.job_id_para_id_map here,
            # because when the first parameter_id is created, trial_job_id is not known yet.
            if data['trial_job_id'] in self.job_id_para_id_map:
                assert self.job_id_para_id_map[
                    data['trial_job_id']] == data['parameter_id']
            else:
                self.job_id_para_id_map[
                    data['trial_job_id']] = data['parameter_id']

            if data['type'] == MetricType.FINAL:
                # sys.maxsize indicates this value is from FINAL metric data, because data['sequence'] from FINAL metric
                # and PERIODICAL metric are independent, thus, not comparable.
                self.brackets[bracket_id].set_config_perf(
                    int(i), data['parameter_id'], sys.maxsize, value)
                self.completed_hyper_configs.append(data)
            elif data['type'] == MetricType.PERIODICAL:
                self.brackets[bracket_id].set_config_perf(
                    int(i), data['parameter_id'], data['sequence'], value)
            else:
                raise ValueError('Data type not supported: {}'.format(
                    data['type']))
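The split above assumes an ID convention that is only implicit in these examples: parameter_id is a string with three underscore-separated fields, of which the first is the bracket id and the second an index inside the bracket (the sentinel '-1_0_0' from example #4 has the same shape). Below is a small sketch of that assumption; the field names are descriptive guesses, not NNI's terminology.

# Sketch of the parameter_id convention implied by the split('_') calls above.
def parse_parameter_id(parameter_id):
    bracket_id, inner_index, suffix = parameter_id.split('_')
    return bracket_id, int(inner_index), suffix

print(parse_parameter_id('0_1_3'))    # ('0', 1, '3') -- hypothetical id
print(parse_parameter_id('-1_0_0'))   # ('-1', 0, '0') -- the "no more trials" sentinel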
Example #8
File: bohb_advisor.py Project: maxpark/nni
    def _send_new_trial(self):
        while self.unsatisfied_jobs:
            ret = self._get_one_trial_job()
            if ret is None:
                break
            one_unsatisfied = self.unsatisfied_jobs.pop(0)
            ret['trial_job_id'] = one_unsatisfied['trial_job_id']
            ret['parameter_index'] = one_unsatisfied['parameter_index']
            # update parameter_id in self.job_id_para_id_map
            self.job_id_para_id_map[ret['trial_job_id']] = ret['parameter_id']
            send(CommandType.SendTrialJobParameter, nni.dump(ret))
        for _ in range(self.credit):
            self._request_one_trial_job()
Example #9
File: integration.py Project: maxpark/nni
    def send_trial(self, parameters, placement_constraint=None):
        """
        Send parameters to NNI.

        Parameters
        ----------
        parameters : Any
            Any payload.

        Returns
        -------
        int
            Parameter ID that is assigned to this parameter,
            which will be used for identification in the future.
        """
        self.parameters_count += 1
        if placement_constraint is None:
            placement_constraint = {
                'type': 'None',
                'gpus': []
            }
        self._validate_placement_constraint(placement_constraint)
        new_trial = {
            'parameter_id': self.parameters_count,
            'parameters': parameters,
            'parameter_source': 'algorithm',
            'placement_constraint': placement_constraint,
            'version_info': version_dump()
        }
        _logger.debug('New trial sent: %s', new_trial)

        try:
            send_payload = nni.dump(new_trial, pickle_size_limit=int(os.getenv('PICKLE_SIZE_LIMIT', 64 * 1024)))
        except PayloadTooLarge:
            raise ValueError(
                'Serialization failed when trying to dump the model because the payload is too large (larger than 64 KB). '
                'This is usually caused by pickling large objects (like datasets) by mistake. '
                'See the full error traceback for details and https://nni.readthedocs.io/en/stable/NAS/Serialization.html '
                'for how to resolve such issue. '
            )

        # trial parameters can be super large, disable pickle size limit here
        # nevertheless, it could still be blocked by the pipe / nni-manager
        send(CommandType.NewTrialJob, send_payload)

        if self.send_trial_callback is not None:
            self.send_trial_callback(parameters)  # pylint: disable=not-callable
        return self.parameters_count
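The 64 KB limit above is read from the PICKLE_SIZE_LIMIT environment variable via os.getenv, so it can presumably be raised without touching the code. A short sketch follows; setting the variable before the dispatcher starts is an assumption about ordering, and 256 KB is an arbitrary illustrative value.

import os

# Raise the serialization limit that nni.dump reads in the code above.
# The variable name and the 64 * 1024 default come from the os.getenv call.
os.environ['PICKLE_SIZE_LIMIT'] = str(256 * 1024)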
Example #10
    def _get_one_trial_job(self):
        """get one trial job, i.e., one hyperparameter configuration."""
        if not self.generated_hyper_configs:
            if self.exec_mode == 'parallelism' or \
               (self.exec_mode == 'serial' and (self.curr_bracket_id is None or self.brackets[self.curr_bracket_id].is_completed())):
                if self.curr_s < 0:
                    self.curr_s = self.s_max
                    self.curr_hb += 1
                _logger.debug(
                    'create a new bracket, self.curr_hb=%d, self.curr_s=%d',
                    self.curr_hb, self.curr_s)
                self.curr_bracket_id = '{}-{}'.format(self.curr_hb,
                                                      self.curr_s)
                self.brackets[self.curr_bracket_id] = Bracket(
                    self.curr_bracket_id, self.curr_s, self.s_max, self.eta,
                    self.R, self.optimize_mode)
                next_n, next_r = self.brackets[self.curr_bracket_id].get_n_r()
                _logger.debug('new bracket, next_n=%d, next_r=%d', next_n,
                              next_r)
                assert self.searchspace_json is not None and self.random_state is not None
                generated_hyper_configs = self.brackets[
                    self.curr_bracket_id].get_hyperparameter_configurations(
                        next_n, next_r, self.searchspace_json,
                        self.random_state)
                self.generated_hyper_configs = generated_hyper_configs.copy()
                self.curr_s -= 1
            else:
                ret = {
                    'parameter_id': '-1_0_0',
                    'parameter_source': 'algorithm',
                    'parameters': ''
                }
                send(CommandType.NoMoreTrialJobs, nni.dump(ret))
                return None

        assert self.generated_hyper_configs
        params = self.generated_hyper_configs.pop(0)
        ret = {
            'parameter_id': params[0],
            'parameter_source': 'algorithm',
            'parameters': params[1]
        }
        return ret
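The bracket's get_n_r() is not shown in this listing. For orientation only, the textbook Hyperband schedule computes the initial number of configurations n and the initial budget r per bracket as below; this is the standard formula, not necessarily NNI's exact implementation.

import math

def hyperband_n_r(s, s_max, eta, R):
    # Standard Hyperband: bracket s starts with n configs, each given budget r.
    n = math.ceil((s_max + 1) * eta ** s / (s + 1))
    r = R / eta ** s
    return n, r

# With eta=3 and R=81 (so s_max = 4):
for s in range(4, -1, -1):
    print(s, hyperband_n_r(s, s_max=4, eta=3, R=81))
# -> (81, 1.0), (34, 3.0), (15, 9.0), (8, 27.0), (5, 81.0)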
Example #11
File: bohb_advisor.py Project: maxpark/nni
    def _request_one_trial_job(self):
        """get one trial job, i.e., one hyperparameter configuration.

        If this function is called, a command will be sent by BOHB:
        a. If there is a parameter waiting to run, it will send "NewTrialJob" with a dict:
        {
            'parameter_id': id of new hyperparameter
            'parameter_source': 'algorithm'
            'parameters': value of new hyperparameter
        }
        b. If BOHB has no parameters waiting, it will send "NoMoreTrialJobs" with
        {
            'parameter_id': '-1_0_0',
            'parameter_source': 'algorithm',
            'parameters': ''
        }
        """
        ret = self._get_one_trial_job()
        if ret is not None:
            send(CommandType.NewTrialJob, nni.dump(ret))
            self.credit -= 1
Example #12
    def test_assessor(self):
        _reverse_io()
        send(
            CommandType.ReportMetricData,
            '{"trial_job_id":"A","type":"PERIODICAL","sequence":0,"value":"2"}'
        )
        send(
            CommandType.ReportMetricData,
            '{"trial_job_id":"B","type":"PERIODICAL","sequence":0,"value":"2"}'
        )
        send(
            CommandType.ReportMetricData,
            '{"trial_job_id":"A","type":"PERIODICAL","sequence":1,"value":"3"}'
        )
        send(CommandType.TrialEnd,
             '{"trial_job_id":"A","event":"SYS_CANCELED"}')
        send(CommandType.TrialEnd, '{"trial_job_id":"B","event":"SUCCEEDED"}')
        send(CommandType.NewTrialJob, 'null')
        _restore_io()

        assessor = NaiveAssessor()
        dispatcher = MsgDispatcher(None, assessor)
        msg_dispatcher_base._worker_fast_exit_on_terminate = False

        dispatcher.run()
        e = dispatcher.worker_exceptions[0]
        self.assertIs(type(e), AssertionError)
        self.assertEqual(e.args[0],
                         'Unsupported command: CommandType.NewTrialJob')

        self.assertEqual(_trials, ['A', 'B', 'A'])
        self.assertEqual(_end_trials, [('A', False), ('B', True)])

        _reverse_io()
        command, data = receive()
        self.assertIs(command, CommandType.KillTrialJob)
        self.assertEqual(data, '"A"')
        self.assertEqual(len(_out_buf.read()), 0)
Example #13
    def test_msg_dispatcher(self):
        _reverse_io()  # now we are sending to Tuner's incoming stream
        send(CommandType.RequestTrialJobs, '2')
        send(CommandType.ReportMetricData,
             '{"parameter_id":0,"type":"PERIODICAL","value":"10"}')
        send(CommandType.ReportMetricData,
             '{"parameter_id":1,"type":"FINAL","value":"11"}')
        send(CommandType.UpdateSearchSpace, '{"name":"SS0"}')
        send(CommandType.RequestTrialJobs, '1')
        send(CommandType.KillTrialJob, 'null')
        _restore_io()

        tuner = NaiveTuner()
        dispatcher = MsgDispatcher(tuner)
        msg_dispatcher_base._worker_fast_exit_on_terminate = False

        dispatcher.run()
        e = dispatcher.worker_exceptions[0]
        self.assertIs(type(e), AssertionError)
        self.assertEqual(e.args[0],
                         'Unsupported command: CommandType.KillTrialJob')

        _reverse_io()  # now we are receiving from Tuner's outgoing stream
        self._assert_params(0, 2, [], None)
        self._assert_params(1, 4, [], None)

        self._assert_params(2, 6, [[1, 4, 11, False]], {'name': 'SS0'})

        self.assertEqual(len(_out_buf.read()), 0)  # no more commands
Example #14
File: bohb_advisor.py Project: maxpark/nni
    def handle_report_metric_data(self, data):
        """reveice the metric data and update Bayesian optimization with final result

        Parameters
        ----------
        data:
            it is an object which has keys 'parameter_id', 'value', 'trial_job_id', 'type', 'sequence'.

        Raises
        ------
        ValueError
            Data type not supported
        """
        logger.debug('handle report metric data = %s', data)
        if 'value' in data:
            data['value'] = nni.load(data['value'])
        if data['type'] == MetricType.REQUEST_PARAMETER:
            assert multi_phase_enabled()
            assert data['trial_job_id'] is not None
            assert data['parameter_index'] is not None
            assert data['trial_job_id'] in self.job_id_para_id_map
            self._handle_trial_end(
                self.job_id_para_id_map[data['trial_job_id']])
            ret = self._get_one_trial_job()
            if ret is None:
                self.unsatisfied_jobs.append({
                    'trial_job_id':
                    data['trial_job_id'],
                    'parameter_index':
                    data['parameter_index']
                })
            else:
                ret['trial_job_id'] = data['trial_job_id']
                ret['parameter_index'] = data['parameter_index']
                # update parameter_id in self.job_id_para_id_map
                self.job_id_para_id_map[
                    data['trial_job_id']] = ret['parameter_id']
                send(CommandType.SendTrialJobParameter, nni.dump(ret))
        else:
            assert 'value' in data
            value = extract_scalar_reward(data['value'])
            if self.optimize_mode is OptimizeMode.Maximize:
                reward = -value
            else:
                reward = value
            assert 'parameter_id' in data
            s, i, _ = data['parameter_id'].split('_')
            logger.debug('bracket id = %s, metrics value = %s, type = %s', s,
                         value, data['type'])
            s = int(s)

            # add <trial_job_id, parameter_id> to self.job_id_para_id_map here,
            # because when the first parameter_id is created, trial_job_id is not known yet.
            if data['trial_job_id'] in self.job_id_para_id_map:
                assert self.job_id_para_id_map[
                    data['trial_job_id']] == data['parameter_id']
            else:
                self.job_id_para_id_map[
                    data['trial_job_id']] = data['parameter_id']

            assert 'type' in data
            if data['type'] == MetricType.FINAL:
                # sys.maxsize indicates this value is from FINAL metric data, because data['sequence'] from FINAL metric
                # and PERIODICAL metric are independent, thus, not comparable.
                assert 'sequence' in data
                self.brackets[s].set_config_perf(int(i), data['parameter_id'],
                                                 sys.maxsize, value)
                self.completed_hyper_configs.append(data)

                _parameters = self.parameters[data['parameter_id']]
                _parameters.pop(_KEY)
                # update BO with loss, max_s budget, hyperparameters
                self.cg.new_result(loss=reward,
                                   budget=data['sequence'],
                                   parameters=_parameters,
                                   update_model=True)
            elif data['type'] == MetricType.PERIODICAL:
                self.brackets[s].set_config_perf(int(i), data['parameter_id'],
                                                 data['sequence'], value)
            else:
                raise ValueError('Data type not supported: {}'.format(
                    data['type']))
Example #15
    def test_send_en(self):
        out_file = _prepare_send()
        send(CommandType.NewTrialJob, 'CONTENT')
        self.assertEqual(out_file.getvalue(), b'TR00000000000007CONTENT')
Example #16
    def _request_one_trial_job(self):
        ret = self._get_one_trial_job()
        if ret is not None:
            send(CommandType.NewTrialJob, json_tricks.dumps(ret))
            self.credit -= 1
Example #17
    def test_send_zh(self):
        out_file = _prepare_send()
        send(CommandType.NewTrialJob, '你好')
        self.assertEqual(out_file.getvalue(),
                         'TR00000000000006你好'.encode('utf8'))
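Taken together, the two assertions in examples #15 and #17 imply the wire framing: a two-character command code ('TR' here, presumably NewTrialJob), a 14-digit zero-padded byte length, then the UTF-8 payload ('CONTENT' is 7 bytes, '你好' is 6). The encoder below is an inference from these tests, not the library's actual send implementation.

def encode_command(command_code, payload):
    # 2-char command code + 14-digit zero-padded UTF-8 byte length + payload bytes.
    body = payload.encode('utf8')
    return command_code.encode('ascii') + b'%014d' % len(body) + body

assert encode_command('TR', 'CONTENT') == b'TR00000000000007CONTENT'
assert encode_command('TR', '你好') == 'TR00000000000006你好'.encode('utf8')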
Example #18
    def mark_experiment_as_ending(self):
        send(CommandType.NoMoreTrialJobs, '')