Example #1
    def __init__(self,
                 name=None,
                 *,
                 params=None,
                 optimizer=None,
                 half=False,
                 loss_scale=1,
                 dynamic_loss_scale=False,
                 scale_window=1000,
                 scale_factor=2,
                 min_loss_scale=None,
                 max_loss_scale=2.**24,
                 **kwargs):
        self._optimizer = None

        if params is not None:
            # materialize the iterable so it can be consumed more than once
            params = list(params)

        self._model_parameters = params
        self._half_parameters(half, loss_scale, dynamic_loss_scale,
                              scale_window, scale_factor, min_loss_scale,
                              max_loss_scale)

        # Track defined hyper parameters
        self.hyper_parameters = HyperParameters(space={})

        if optimizer:
            warning('Using custom optimizer')
            if isinstance(optimizer, type):
                self.optimizer_builder = optimizer

                if hasattr(optimizer, 'get_space'):
                    self.hyper_parameters.space = optimizer.get_space()
            else:
                self._optimizer = self._wrap_optimizer(optimizer)

                if hasattr(self._optimizer, 'get_space'):
                    self.hyper_parameters.space = self._optimizer.get_space()

        elif name:
            # look up a registered olympus optimizer by name
            self.optimizer_builder = registered_optimizers.get(name.lower())

            if not self.optimizer_builder:
                raise RegisteredOptimizerNotFound(name)

            if hasattr(self.optimizer_builder, 'get_space'):
                self.hyper_parameters.space = self.optimizer_builder.get_space()

        else:
            raise MissingArgument('optimizer or name needs to be set')

        # All additional args are hyper parameters
        self.hyper_parameters.add_parameters(**kwargs)
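
Example #1 resolves name through a module-level registry and reads an optional get_space hook off the builder; Examples #3 and #6 below follow the same pattern. Here is a minimal sketch of that pattern: only the dict lookup and the get_space hook mirror the snippet, while register_optimizer and SGDBuilder are hypothetical names added purely for illustration.

registered_optimizers = {}

def register_optimizer(name):
    # register a builder class under a lower-cased name
    def decorator(builder):
        registered_optimizers[name.lower()] = builder
        return builder
    return decorator

@register_optimizer('SGD')
class SGDBuilder:
    @staticmethod
    def get_space():
        # the hyper parameter space a builder advertises
        return {'lr': 'loguniform(1e-5, 1)', 'momentum': 'uniform(0, 1)'}

# name.lower() lookup, as in the constructor above
assert registered_optimizers.get('sgd') is SGDBuilder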
Example #2
def test_hyperparameter_nested_tracking_all_set():
    space = {
        'initializer': {
            'a': 'uniform(0, 1)',
            'b': 'uniform(0, 1)',
        }
    }

    hp = HyperParameters(space, initializer=dict(a=0.123, b=0.124))
    assert hp.parameters(strict=True) == dict(initializer=dict(a=0.123, b=0.124))
Example #3
    def __init__(self, name, seed=0, **kwargs):
        self.name = name
        self.hyper_parameters = HyperParameters(space={})
        self.seed = seed
        self._initializer = None

        self.initializer_ctor = registered_initialization.get(name)

        if self.initializer_ctor is None:
            raise RegisteredInitNotFound(name)

        if hasattr(self.initializer_ctor, 'get_space'):
            self.hyper_parameters.space = self.initializer_ctor.get_space()

        self.hyper_parameters.add_parameters(**kwargs)
Example #4
def test_hyperparameter_tracking():
    space = {
        'a': 'uniform(0, 1)',
        'b': 'uniform(0, 1)'
    }

    # space with Fixed HP
    hp = HyperParameters(space, b=0.124)

    # Hp a is missing
    with pytest.raises(MissingParameters):
        hp.parameters(strict=True)

    # return the space of missing params
    assert hp.missing_parameters() == dict(a='uniform(0, 1)')

    hp.add_parameters(a=0.123)
    assert hp.missing_parameters() == {}
    assert hp.parameters(strict=True) == dict(a=0.123, b=0.124)
Example #5
def test_hyperparameter_nested_tracking():
    space = {
        'initializer': {
            'a': 'uniform(0, 1)',
            'b': 'uniform(0, 1)',
        }
    }

    hp = HyperParameters(space, initializer=dict(b=0.124))

    # Hp a is missing
    with pytest.raises(MissingParameters):
        hp.parameters(strict=True)

    # return the space of missing params
    assert hp.missing_parameters() == dict(initializer=dict(a='uniform(0, 1)'))

    hp.add_parameters(initializer=dict(a=0.123))
    assert hp.missing_parameters() == {}
    assert hp.parameters(strict=True) == dict(initializer=dict(a=0.123, b=0.124))
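
Examples #2, #4, and #5 pin down the tracking contract: fixed values may be passed at construction, missing_parameters() returns the unfilled slice of the space (recursing into nested dicts), and parameters(strict=True) raises MissingParameters while anything is unset. Below is a minimal sketch that satisfies all three tests; it is an illustrative stand-in, not the olympus implementation (for instance, the real add_parameters also accepts a strict flag and returns leftover arguments, as Example #9 shows).

class MissingParameters(Exception):
    """Raised by parameters(strict=True) while values are still missing."""


class HyperParameters:
    def __init__(self, space, **fixed):
        self.space = space
        self._values = {}
        self.add_parameters(**fixed)

    def add_parameters(self, **params):
        # merge nested dicts instead of overwriting them
        for name, value in params.items():
            if isinstance(value, dict):
                self._values.setdefault(name, {}).update(value)
            else:
                self._values[name] = value

    def missing_parameters(self):
        return _missing(self.space, self._values)

    def parameters(self, strict=False):
        if strict and self.missing_parameters():
            raise MissingParameters(self.missing_parameters())
        return self._values


def _missing(space, values):
    # walk the space, keeping the dimensions that have no value yet
    missing = {}
    for name, dim in space.items():
        if isinstance(dim, dict):
            sub = _missing(dim, values.get(name, {}))
            if sub:
                missing[name] = sub
        elif name not in values:
            missing[name] = dim
    return missing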
Example #6
    def __init__(self, name=None, *, schedule=None, optimizer=None, **kwargs):
        self._schedule = None
        self._schedule_builder = None
        self._optimizer = optimizer

        self.hyper_parameters = HyperParameters(space={})

        if schedule:
            if isinstance(schedule, type):
                self._schedule_builder = schedule

                if hasattr(schedule, 'get_space'):
                    self.hyper_parameters.space = schedule.get_space()

            else:
                self._schedule = schedule

                if hasattr(self._schedule, 'get_space'):
                    self.hyper_parameters.space = self._schedule.get_space()

        elif name:
            # look up a registered olympus LR schedule by name
            builder = registered_schedules.get(name)

            if not builder:
                raise RegisteredLRSchedulerNotFound(name)

            self._schedule_builder = builder

            if hasattr(self._schedule_builder, 'get_space'):
                self.hyper_parameters.space = self._schedule_builder.get_space()

        else:
            raise MissingArgument('schedule or name needs to be set')

        self.hyper_parameters.add_parameters(**kwargs)
Example #7
    def __init__(self, random_state, **hyper_parameters):
        self.model_ctor = sklearn.neural_network.MLPRegressor
        self.random_state = random_state
        self.hp = HyperParameters(self.hyperparameter_space(),
                                  **hyper_parameters)
        self.model = None
Example #8
    def __init__(self, random_state, **hyper_parameters):
        self.model_ctor = linear_model.LogisticRegression
        self.random_state = random_state
        self.hp = HyperParameters(self.hyperparameter_space(),
                                  **hyper_parameters)
        self.model = None
Example #9
    def __init__(self,
                 name=None,
                 *,
                 half=False,
                 model=None,
                 input_size=None,
                 output_size=None,
                 weight_init=default_init,
                 **kwargs):
        super(Model, self).__init__()
        # Save all the args that were passed down so we can instantiate it again in standalone mode
        self.replay_args = dict(name=name,
                                half=half,
                                model=model,
                                input_size=input_size,
                                output_size=output_size,
                                weight_init=weight_init,
                                kwargs=kwargs)

        self.transform = lambda x: try_convert(x, self.device, self.dtype)
        self.half = half
        self._model = None

        # Track defined hyper parameters
        self.hyper_parameters = HyperParameters(space=dict())

        # If init is set then we can add its hyper parameters
        self.weight_init = weight_init
        if weight_init is not None:
            if isinstance(weight_init, str):
                self.weight_init = Initializer(weight_init)

            # expose the initializer's hyper parameters under its own 'initializer' sub-space
            space = self.weight_init.get_space()
            if space:
                self.hyper_parameters.space.update(dict(initializer=space))

        # Make a Lazy Model that will be initialized once all the hyper parameters are set
        if model:
            if hasattr(model, 'get_space'):
                self.hyper_parameters.space.update(model.get_space())

            if isinstance(model, type):
                self.model_builder = LazyCall(model,
                                              input_size=input_size,
                                              output_size=output_size)
            else:
                self.model_builder = LazyCall(lambda *args, **kwargs: model)

        elif name:
            # load an olympus model
            model_fun = registered_models.get(name)

            if not model_fun:
                raise RegisteredModelNotFound(name)

            self.model_builder = LazyCall(model_fun,
                                          input_size=input_size,
                                          output_size=output_size)

            if hasattr(model_fun, 'get_space'):
                self.hyper_parameters.space.update(model_fun.get_space())
        else:
            raise MissingArgument('model or name needs to be set')

        # Any additional arguments are treated as hyper parameters
        self.other_params = self.hyper_parameters.add_parameters(strict=False,
                                                                 **kwargs)
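
LazyCall defers model construction until every hyper parameter is known: it captures the callable plus the arguments available at __init__ time and merges in the rest when finally invoked. A minimal sketch of such a helper, assuming late arguments take precedence; the real olympus class may differ.

class LazyCall:
    def __init__(self, fun, **partial_kwargs):
        self.fun = fun
        self.kwargs = partial_kwargs

    def __call__(self, *args, **late_kwargs):
        # late arguments (e.g. resolved hyper parameters) win over
        # the ones captured at construction time
        return self.fun(*args, **{**self.kwargs, **late_kwargs})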
Example #10
    def __init__(self, random_state, **hyper_parameters):
        self.model_ctor = tree.DecisionTreeClassifier
        self.random_state = random_state
        self.hp = HyperParameters(self.hyperparameter_space(), **hyper_parameters)
        self.model = None
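
Examples #7, #8, and #10 share one wrapper shape: store an sklearn constructor, track its space in HyperParameters, and leave model unset until all values are fixed. One way such a wrapper might materialize its estimator, sketched under the assumption that hp.parameters(strict=True) maps directly onto constructor keywords; build_model is a hypothetical helper, not part of the snippets above.

def build_model(wrapper):
    # raises MissingParameters if any dimension is still unset
    params = wrapper.hp.parameters(strict=True)
    wrapper.model = wrapper.model_ctor(random_state=wrapper.random_state,
                                       **params)
    return wrapper.model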