Example no. 1
    def write_file(self):
        '''save config to local file'''
        try:
            with open(self.experiment_file, 'w') as file:
                nni.dump(self.experiments, file, indent=4)
        except IOError as error:
            print('Error:', error)
            return ''
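
As a complement, here is a minimal sketch of the matching read path; read_file mirrors the snippet above and is not part of the original code:

    def read_file(self):
        '''load config back from the local file (sketch; assumes write_file ran first)'''
        try:
            with open(self.experiment_file) as file:
                # nni.load accepts the JSON string produced by nni.dump
                return nni.load(file.read())
        except IOError as error:
            print('Error:', error)
            return None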
Example no. 2
def test_type():
    assert nni.dump(
        torch.optim.Adam) == '{"__nni_type__": "path:torch.optim.adam.Adam"}'
    assert nni.load(
        '{"__nni_type__": "path:torch.optim.adam.Adam"}') == torch.optim.Adam
    assert Foo == nni.load(nni.dump(Foo))
    assert nni.dump(math.floor) == '{"__nni_type__": "path:math.floor"}'
    assert nni.load('{"__nni_type__": "path:math.floor"}') == math.floor
Example no. 3
def test_basic_unit_and_custom_import():
    module = ImportTest(3, 0.5)
    ss = nni.dump(module)
    assert ss == r'{"__symbol__": "path:imported.model.ImportTest", "__kwargs__": {"foo": 3, "bar": 0.5}}'
    assert nni.load(nni.dump(module)) == module

    import nni.retiarii.nn.pytorch as nn
    module = nn.Conv2d(3, 10, 3, bias=False)
    ss = nni.dump(module)
    assert ss == r'{"__symbol__": "path:torch.nn.modules.conv.Conv2d", "__kwargs__": {"in_channels": 3, "out_channels": 10, "kernel_size": 3, "bias": false}}'
    assert nni.load(ss).bias is None
Example no. 4
def test_custom_class():
    module = nni.trace(Foo)(3)
    assert nni.load(nni.dump(module)) == module
    module = nni.trace(Foo)(b=2, a=1)
    assert nni.load(nni.dump(module)) == module

    module = nni.trace(Foo)(Foo(1), 5)
    dumped_module = nni.dump(module)
    module = nni.load(dumped_module)
    assert module.bb[0] == module.bb[999] == 6

    module = nni.trace(Foo)(nni.trace(Foo)(1), 5)
    dumped_module = nni.dump(module)
    assert nni.load(dumped_module) == module
Example no. 5
def test_custom_class():
    module = nni.trace(Foo)(3)
    assert nni.load(nni.dump(module)) == module
    module = nni.trace(Foo)(b=2, a=1)
    assert nni.load(nni.dump(module)) == module

    module = nni.trace(Foo)(Foo(1), 5)
    dumped_module = nni.dump(module)
    assert len(dumped_module) > 200  # should not be too short: the untraced Foo(1) has to be pickled inline

    module = nni.trace(Foo)(nni.trace(Foo)(1), 5)
    dumped_module = nni.dump(module)
    assert nni.load(dumped_module) == module
Example no. 6
    def import_data(self, data):
        # TODO
        # use a tuple to dedup, in case order/precision issues cause matching to fail,
        # and remove `epoch_bar` to use a uniform dedup mechanism
        for trial in data:
            params_str = nni.dump(trial['parameter'], sort_keys=True)
            self.history.add(params_str)
Example no. 7
    def send_trial(self, parameters, placement_constraint=None):
        """
        Send parameters to NNI.

        Parameters
        ----------
        parameters : Any
            Any payload.

        Returns
        -------
        int
            Parameter ID that is assigned to this parameter,
            which will be used for identification in the future.
        """
        self.parameters_count += 1
        if placement_constraint is None:
            placement_constraint = {'type': 'None', 'gpus': []}
        self._validate_placement_constraint(placement_constraint)
        new_trial = {
            'parameter_id': self.parameters_count,
            'parameters': parameters,
            'parameter_source': 'algorithm',
            'placement_constraint': placement_constraint
        }
        _logger.debug('New trial sent: %s', new_trial)
        send(CommandType.NewTrialJob, nni.dump(new_trial))
        if self.send_trial_callback is not None:
            self.send_trial_callback(parameters)  # pylint: disable=not-callable
        return self.parameters_count
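
Because new_trial above contains only JSON-native values (plus whatever `parameters` holds), the dumped payload round-trips cleanly through nni.dump/nni.load. A minimal sketch with illustrative values, not taken from the original code:

trial = {
    'parameter_id': 1,
    'parameters': {'lr': 0.1},
    'parameter_source': 'algorithm',
    'placement_constraint': {'type': 'None', 'gpus': []},
}
# nni.dump produces a JSON string; nni.load restores an equal dict
assert nni.load(nni.dump(trial)) == trial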
Example no. 8
    def _get_one_trial_job(self):
        """get one trial job, i.e., one hyperparameter configuration.

        When this function is called, a command will be sent by BOHB:
        a. If there is a parameter waiting to run, it will return "NewTrialJob" with a dict:
        {
            'parameter_id': id of new hyperparameter
            'parameter_source': 'algorithm'
            'parameters': value of new hyperparameter
        }
        b. If BOHB has no parameters waiting, it will return "NoMoreTrialJobs" with
        {
            'parameter_id': '-1_0_0',
            'parameter_source': 'algorithm',
            'parameters': ''
        }
        """
        if not self.generated_hyper_configs:
            ret = {
                'parameter_id': '-1_0_0',
                'parameter_source': 'algorithm',
                'parameters': ''
            }
            send(CommandType.NoMoreTrialJobs, nni.dump(ret))
            return None
        assert self.generated_hyper_configs
        params = self.generated_hyper_configs.pop(0)
        ret = {
            'parameter_id': params[0],
            'parameter_source': 'algorithm',
            'parameters': params[1]
        }
        self.parameters[params[0]] = params[1]
        return ret
Example no. 9
def test_lightning_earlystop():
    import nni.retiarii.evaluator.pytorch.lightning as pl
    from pytorch_lightning.callbacks.early_stopping import EarlyStopping
    trainer = pl.Trainer(
        callbacks=[nni.trace(EarlyStopping)(monitor="val_loss")])
    trainer = nni.load(nni.dump(trainer))
    assert any(
        isinstance(callback, EarlyStopping) for callback in trainer.callbacks)
Example no. 10
def test_nested_class():
    a = SimpleClass(1, 2)
    b = SimpleClass(a)
    assert b._a._a == 1
    dump_str = nni.dump(b)
    b = nni.load(dump_str)
    assert 'SimpleClass object at' in repr(b)
    assert b._a._a == 1
Example no. 11
    def _not_dup(self, formatted_parameters: FormattedParameters) -> bool:
        params = deformat_parameters(formatted_parameters, self._space)
        params_str = typing.cast(str, nni.dump(params, sort_keys=True))
        if params_str in self._history:
            return False
        else:
            self._history.add(params_str)
            return True
Example no. 12
def test_external_class():
    from collections import OrderedDict
    d = nni.trace(kw_only=False)(OrderedDict)([('a', 1), ('b', 2)])
    assert d['a'] == 1
    assert d['b'] == 2
    dump_str = nni.dump(d)
    assert dump_str == '{"a": 1, "b": 2}'

    conv = nni.trace(torch.nn.Conv2d)(3, 16, 3)
    assert conv.in_channels == 3
    assert conv.out_channels == 16
    assert conv.kernel_size == (3, 3)
    assert nni.dump(conv) == \
        r'{"__symbol__": "path:torch.nn.modules.conv.Conv2d", ' \
        r'"__kwargs__": {"in_channels": 3, "out_channels": 16, "kernel_size": 3}}'

    conv = nni.load(nni.dump(conv))
    assert conv.kernel_size == (3, 3)
Example no. 13
def test_lightning_earlystop():
    import nni.retiarii.evaluator.pytorch.lightning as pl
    from pytorch_lightning.callbacks.early_stopping import EarlyStopping
    trainer = pl.Trainer(
        callbacks=[nni.trace(EarlyStopping)(monitor="val_loss")])
    pickle_size_limit = 4096 if sys.platform == 'linux' else 32768
    trainer = nni.load(nni.dump(trainer, pickle_size_limit=pickle_size_limit))
    assert any(
        isinstance(callback, EarlyStopping) for callback in trainer.callbacks)
Example no. 14
def test_dedup_nested():
    space = format_search_space(nested_space)
    dedup = Deduplicator(space)
    params = set()
    for i in range(10):
        p = dedup(suggest(rng, space))
        s = nni.dump(deformat_parameters(p, space), sort_keys=True)
        assert s not in params
        params.add(s)
Example no. 15
def test_ordered_json():
    items = [
        ('a', 1),
        ('c', 3),
        ('b', 2),
    ]
    orig = OrderedDict(items)
    json = nni.dump(orig)
    loaded = nni.load(json)
    assert list(loaded.items()) == items
Example no. 16
def test_nested_class():
    a = SimpleClass(1, 2)
    b = SimpleClass(a)
    assert b._a._a == 1
    dump_str = nni.dump(b)
    b = nni.load(dump_str)
    assert repr(
        b
    ) == 'SerializableObject(type=SimpleClass, a=SerializableObject(type=SimpleClass, a=1, b=2))'
    assert b.get()._a._a == 1
Example no. 17
    def generate_parameters(self, *args, **kwargs):
        while True:
            params = self._suggest()
            if params is None:
                raise nni.NoMoreTrialError('Search space fully explored')
            params = deformat_parameters(params, self.space)

            params_str = nni.dump(params, sort_keys=True)
            if params_str not in self.history:
                self.history.add(params_str)
                return params
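
The sort_keys=True above is what makes this string-based dedup robust: two parameter dicts with the same content but different insertion order dump to the same canonical string. A minimal sketch:

# identical content, different insertion order -> identical canonical dump
assert nni.dump({'a': 1, 'b': 2}, sort_keys=True) == \
    nni.dump({'b': 2, 'a': 1}, sort_keys=True)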
Example no. 18
def test_simple_class():
    instance = SimpleClass(1, 2)
    assert instance._a == 1
    assert instance._b == 2

    dump_str = nni.dump(instance)
    assert '"__kwargs__": {"a": 1, "b": 2}' in dump_str
    assert '"__symbol__"' in dump_str
    instance = nni.load(dump_str)
    assert instance._a == 1
    assert instance._b == 2
Example no. 19
    def handle_report_metric_data(self, data):
        """
        Parameters
        ----------
        data : dict
            A dict with keys 'parameter_id', 'value', 'trial_job_id', 'type', 'sequence'.

        Raises
        ------
        ValueError
            Data type not supported
        """
        if 'value' in data:
            data['value'] = nni.load(data['value'])
        # multiphase? need to check
        if data['type'] == MetricType.REQUEST_PARAMETER:
            assert multi_phase_enabled()
            assert data['trial_job_id'] is not None
            assert data['parameter_index'] is not None
            assert data['trial_job_id'] in self.job_id_para_id_map
            self._handle_trial_end(
                self.job_id_para_id_map[data['trial_job_id']])
            ret = self._get_one_trial_job()
            if data['trial_job_id'] is not None:
                ret['trial_job_id'] = data['trial_job_id']
            if data['parameter_index'] is not None:
                ret['parameter_index'] = data['parameter_index']
            self.job_id_para_id_map[data['trial_job_id']] = ret['parameter_id']
            send(CommandType.SendTrialJobParameter, nni.dump(ret))
        else:
            value = extract_scalar_reward(data['value'])
            bracket_id, i, _ = data['parameter_id'].split('_')

            # add <trial_job_id, parameter_id> to self.job_id_para_id_map here,
            # because when the first parameter_id is created, trial_job_id is not known yet.
            if data['trial_job_id'] in self.job_id_para_id_map:
                assert self.job_id_para_id_map[
                    data['trial_job_id']] == data['parameter_id']
            else:
                self.job_id_para_id_map[
                    data['trial_job_id']] = data['parameter_id']

            if data['type'] == MetricType.FINAL:
                # sys.maxsize indicates this value comes from FINAL metric data, because data['sequence'] from FINAL
                # and PERIODICAL metrics are independent and thus not comparable.
                self.brackets[bracket_id].set_config_perf(
                    int(i), data['parameter_id'], sys.maxsize, value)
                self.completed_hyper_configs.append(data)
            elif data['type'] == MetricType.PERIODICAL:
                self.brackets[bracket_id].set_config_perf(
                    int(i), data['parameter_id'], data['sequence'], value)
            else:
                raise ValueError('Data type not supported: {}'.format(
                    data['type']))
Example no. 20
def test_model_wrapper_serialize():
    from nni.retiarii import model_wrapper

    @model_wrapper
    class Model(nn.Module):
        def __init__(self, in_channels):
            super().__init__()
            self.in_channels = in_channels

    model = Model(3)
    dumped = nni.dump(model)
    loaded = nni.load(dumped)
    assert loaded.in_channels == 3
Example no. 21
    def _send_new_trial(self):
        while self.unsatisfied_jobs:
            ret = self._get_one_trial_job()
            if ret is None:
                break
            one_unsatisfied = self.unsatisfied_jobs.pop(0)
            ret['trial_job_id'] = one_unsatisfied['trial_job_id']
            ret['parameter_index'] = one_unsatisfied['parameter_index']
            # update parameter_id in self.job_id_para_id_map
            self.job_id_para_id_map[ret['trial_job_id']] = ret['parameter_id']
            send(CommandType.SendTrialJobParameter, nni.dump(ret))
        for _ in range(self.credit):
            self._request_one_trial_job()
Example no. 22
def test_function():
    t = nni.trace(math.sqrt, kw_only=False)(3)
    assert 1 < t < 2
    assert t.trace_symbol == math.sqrt
    assert t.trace_args == [3]
    t = nni.load(nni.dump(t))
    assert 1 < t < 2
    assert not is_traceable(t)  # trace not recovered; expected (a known limitation)

    def simple_class_factory(bb=3.):
        return SimpleClass(1, bb)

    t = nni.trace(simple_class_factory)(4)
    ts = nni.dump(t)
    assert '__kwargs__' in ts
    t = nni.load(ts)
    assert t._a == 1
    assert is_traceable(t)
    t = t.trace_copy()
    assert is_traceable(t)
    assert t.trace_symbol(10)._b == 10
    assert t.trace_kwargs['bb'] == 4
    assert is_traceable(t.trace_copy())
Example no. 23
    def send_trial(self, parameters, placement_constraint=None):
        """
        Send parameters to NNI.

        Parameters
        ----------
        parameters : Any
            Any payload.

        Returns
        -------
        int
            Parameter ID that is assigned to this parameter,
            which will be used for identification in the future.
        """
        self.parameters_count += 1
        if placement_constraint is None:
            placement_constraint = {
                'type': 'None',
                'gpus': []
            }
        self._validate_placement_constraint(placement_constraint)
        new_trial = {
            'parameter_id': self.parameters_count,
            'parameters': parameters,
            'parameter_source': 'algorithm',
            'placement_constraint': placement_constraint,
            'version_info': version_dump()
        }
        _logger.debug('New trial sent: %s', new_trial)

        try:
            send_payload = nni.dump(new_trial, pickle_size_limit=int(os.getenv('PICKLE_SIZE_LIMIT', 64 * 1024)))
        except PayloadTooLarge:
            raise ValueError(
                'Serialization failed when trying to dump the model because the payload is too large (larger than 64 KB). '
                'This is usually caused by pickling large objects (like datasets) by mistake. '
                'See the full error traceback for details and https://nni.readthedocs.io/en/stable/NAS/Serialization.html '
                'for how to resolve such issues.'
            )

        # trial parameters can be super large, so the pickle size limit is raised here (64 KB by default);
        # nevertheless, the send could still be blocked by the pipe / nni-manager
        send(CommandType.NewTrialJob, send_payload)

        if self.send_trial_callback is not None:
            self.send_trial_callback(parameters)  # pylint: disable=not-callable
        return self.parameters_count
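
The usual way to avoid the PayloadTooLarge path above is the pattern shown in Example no. 24: wrap the constructor of any large object in nni.trace, so that nni.dump records only the symbol path and constructor arguments instead of pickling the object itself. A minimal sketch (imports added for self-containment):

import nni
from torchvision.datasets import MNIST

# traced: the dump holds a path reference plus kwargs, not the dataset bytes
dataset = nni.trace(MNIST)(root='data/mnist', train=False, download=True)
assert '__symbol__' in nni.dump(dataset)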
Example no. 24
def test_dataset():
    dataset = nni.trace(MNIST)(root='data/mnist', train=False, download=True)
    dataloader = nni.trace(DataLoader)(dataset, batch_size=10)

    dumped_ans = {
        "__symbol__": "path:torch.utils.data.dataloader.DataLoader",
        "__kwargs__": {
            "dataset": {
                "__symbol__": "path:torchvision.datasets.mnist.MNIST",
                "__kwargs__": {
                    "root": "data/mnist",
                    "train": False,
                    "download": True
                }
            },
            "batch_size": 10
        }
    }
    print(nni.dump(dataloader))
    print(nni.dump(dumped_ans))
    assert nni.dump(dataloader) == nni.dump(dumped_ans)
    dataloader = nni.load(nni.dump(dumped_ans))
    assert isinstance(dataloader, DataLoader)

    dataset = nni.trace(MNIST)(root='data/mnist',
                               train=False,
                               download=True,
                               transform=nni.trace(transforms.Compose)([
                                   nni.trace(transforms.ToTensor)(),
                                   nni.trace(transforms.Normalize)((0.1307, ),
                                                                   (0.3081, ))
                               ]))
    dataloader = nni.trace(DataLoader)(dataset, batch_size=10)
    x, y = next(iter(nni.load(nni.dump(dataloader))))
    assert x.size() == torch.Size([10, 1, 28, 28])
    assert y.size() == torch.Size([10])

    dataset = nni.trace(MNIST)(root='data/mnist',
                               train=False,
                               download=True,
                               transform=nni.trace(transforms.Compose)([
                                   transforms.ToTensor(),
                                   transforms.Normalize((0.1307, ), (0.3081, ))
                               ]))
    dataloader = nni.trace(DataLoader)(dataset, batch_size=10)
    x, y = next(iter(nni.load(nni.dump(dataloader))))
    assert x.size() == torch.Size([10, 1, 28, 28])
    assert y.size() == torch.Size([10])
Example no. 25
    def _get_one_trial_job(self):
        """get one trial job, i.e., one hyperparameter configuration."""
        if not self.generated_hyper_configs:
            if self.exec_mode == 'parallelism' or \
               (self.exec_mode == 'serial' and (self.curr_bracket_id is None or self.brackets[self.curr_bracket_id].is_completed())):
                if self.curr_s < 0:
                    self.curr_s = self.s_max
                    self.curr_hb += 1
                _logger.debug(
                    'create a new bracket, self.curr_hb=%d, self.curr_s=%d',
                    self.curr_hb, self.curr_s)
                self.curr_bracket_id = '{}-{}'.format(self.curr_hb,
                                                      self.curr_s)
                self.brackets[self.curr_bracket_id] = Bracket(
                    self.curr_bracket_id, self.curr_s, self.s_max, self.eta,
                    self.R, self.optimize_mode)
                next_n, next_r = self.brackets[self.curr_bracket_id].get_n_r()
                _logger.debug('new bracket, next_n=%d, next_r=%d', next_n,
                              next_r)
                assert self.searchspace_json is not None and self.random_state is not None
                generated_hyper_configs = self.brackets[
                    self.curr_bracket_id].get_hyperparameter_configurations(
                        next_n, next_r, self.searchspace_json,
                        self.random_state)
                self.generated_hyper_configs = generated_hyper_configs.copy()
                self.curr_s -= 1
            else:
                ret = {
                    'parameter_id': '-1_0_0',
                    'parameter_source': 'algorithm',
                    'parameters': ''
                }
                send(CommandType.NoMoreTrialJobs, nni.dump(ret))
                return None

        assert self.generated_hyper_configs
        params = self.generated_hyper_configs.pop(0)
        ret = {
            'parameter_id': params[0],
            'parameter_source': 'algorithm',
            'parameters': params[1]
        }
        return ret
Example no. 26
def test_get():
    @nni.trace
    class Foo:
        def __init__(self, a=1):
            self._a = a

        def bar(self):
            return self._a + 1

    obj = Foo(3)
    assert nni.load(nni.dump(obj)).bar() == 4
    obj1 = obj.trace_copy()
    with pytest.raises(AttributeError):
        obj1.bar()
    obj1.trace_kwargs['a'] = 5
    obj1 = obj1.get()
    assert obj1.bar() == 6
    obj2 = obj1.trace_copy()
    obj2.trace_kwargs['a'] = -1
    assert obj2.get().bar() == 0
Example no. 27
    def _request_one_trial_job(self):
        """get one trial job, i.e., one hyperparameter configuration.

        When this function is called, a command will be sent by BOHB:
        a. If there is a parameter waiting to run, it will return "NewTrialJob" with a dict:
        {
            'parameter_id': id of new hyperparameter
            'parameter_source': 'algorithm'
            'parameters': value of new hyperparameter
        }
        b. If BOHB has no parameters waiting, it will return "NoMoreTrialJobs" with
        {
            'parameter_id': '-1_0_0',
            'parameter_source': 'algorithm',
            'parameters': ''
        }
        """
        ret = self._get_one_trial_job()
        if ret is not None:
            send(CommandType.NewTrialJob, nni.dump(ret))
            self.credit -= 1
Example no. 28
def test_unserializable():
    a = UnserializableSimpleClass()
    dump_str = nni.dump(a)
    a = nni.load(dump_str)
    assert a._a == 1
Example no. 29
    def _request_one_trial_job(self):
        ret = self._get_one_trial_job()
        if ret is not None:
            send(CommandType.NewTrialJob, nni.dump(ret))
            self.credit -= 1
Example no. 30
    def handle_report_metric_data(self, data):
        """reveice the metric data and update Bayesian optimization with final result

        Parameters
        ----------
        data : dict
            A dict with keys 'parameter_id', 'value', 'trial_job_id', 'type', 'sequence'.

        Raises
        ------
        ValueError
            Data type not supported
        """
        logger.debug('handle report metric data = %s', data)
        if 'value' in data:
            data['value'] = nni.load(data['value'])
        if data['type'] == MetricType.REQUEST_PARAMETER:
            assert multi_phase_enabled()
            assert data['trial_job_id'] is not None
            assert data['parameter_index'] is not None
            assert data['trial_job_id'] in self.job_id_para_id_map
            self._handle_trial_end(
                self.job_id_para_id_map[data['trial_job_id']])
            ret = self._get_one_trial_job()
            if ret is None:
                self.unsatisfied_jobs.append({
                    'trial_job_id':
                    data['trial_job_id'],
                    'parameter_index':
                    data['parameter_index']
                })
            else:
                ret['trial_job_id'] = data['trial_job_id']
                ret['parameter_index'] = data['parameter_index']
                # update parameter_id in self.job_id_para_id_map
                self.job_id_para_id_map[
                    data['trial_job_id']] = ret['parameter_id']
                send(CommandType.SendTrialJobParameter, nni.dump(ret))
        else:
            assert 'value' in data
            value = extract_scalar_reward(data['value'])
            if self.optimize_mode is OptimizeMode.Maximize:
                reward = -value
            else:
                reward = value
            assert 'parameter_id' in data
            s, i, _ = data['parameter_id'].split('_')
            logger.debug('bracket id = %s, metrics value = %s, type = %s', s,
                         value, data['type'])
            s = int(s)

            # add <trial_job_id, parameter_id> to self.job_id_para_id_map here,
            # because when the first parameter_id is created, trial_job_id is not known yet.
            if data['trial_job_id'] in self.job_id_para_id_map:
                assert self.job_id_para_id_map[
                    data['trial_job_id']] == data['parameter_id']
            else:
                self.job_id_para_id_map[
                    data['trial_job_id']] = data['parameter_id']

            assert 'type' in data
            if data['type'] == MetricType.FINAL:
                # sys.maxsize indicates this value comes from FINAL metric data, because data['sequence'] from FINAL
                # and PERIODICAL metrics are independent and thus not comparable.
                assert 'sequence' in data
                self.brackets[s].set_config_perf(int(i), data['parameter_id'],
                                                 sys.maxsize, value)
                self.completed_hyper_configs.append(data)

                _parameters = self.parameters[data['parameter_id']]
                _parameters.pop(_KEY)
                # update BO with loss, max_s budget, hyperparameters
                self.cg.new_result(loss=reward,
                                   budget=data['sequence'],
                                   parameters=_parameters,
                                   update_model=True)
            elif data['type'] == MetricType.PERIODICAL:
                self.brackets[s].set_config_perf(int(i), data['parameter_id'],
                                                 data['sequence'], value)
            else:
                raise ValueError('Data type not supported: {}'.format(
                    data['type']))