Example #1
    def objective_function(self,
                           configuration: Union[CS.Configuration, Dict],
                           fidelity: Union[Dict, CS.Configuration,
                                           None] = None,
                           rng: Union[np.random.RandomState, int, None] = None,
                           **kwargs) -> Dict:
        """
        Trains a Bayesian neural network with 3 layers on the defined data set and evaluates the trained model on
        the validation split.

        Parameters
        ----------
        configuration : Dict, CS.Configuration
            Configuration for the pyBNN model
        fidelity : Dict, CS.Configuration, None
            Fidelity parameters for the pyBNN model; check get_fidelity_space(). Uses the default (maximum) value
            if None.
            budget : int [500 - 10000]
                Number of epochs to train the model.
        rng : np.random.RandomState, int, None
            Random seed for benchmark. By default the class level random seed.

            To prevent overfitting on a single seed, it is possible to pass a
            parameter ``rng`` as 'int' or 'np.random.RandomState' to this function.
            If this parameter is not given, the default random state is used.
        kwargs

        Returns
        -------
        Dict -
            function_value : validation loss
            cost : time to train and evaluate the model
            info : Dict
                fidelity : used fidelities in this evaluation
        """
        start = time.time()

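        # Resolve the benchmark RNG (an explicitly passed `rng` takes precedence
        # over the class-level seed) and re-seed numpy's global random state from it.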
        self.rng = rng_helper.get_rng(rng=rng, self_rng=self.rng)
        np.random.seed(self.rng.randint(1, 10000))

        # See the comment in objective_function_test: burn_in_steps must be at least 1.
        burn_in_steps = max(1,
                            int(configuration['burn_in'] * fidelity['budget']))

        net = partial(_get_net,
                      n_units_1=configuration['n_units_1'],
                      n_units_2=configuration['n_units_2'])

        model = BayesianNeuralNetwork(sampling_method="sghmc",
                                      get_net=net,
                                      l_rate=configuration['l_rate'],
                                      mdecay=configuration['mdecay'],
                                      burn_in=burn_in_steps,
                                      n_iters=fidelity['budget'],
                                      precondition=True,
                                      normalize_input=True,
                                      normalize_output=True,
                                      rng=self.rng)

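        # Train on the training split; the validation split is evaluated every 100 steps.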
        model.train(self.train,
                    self.train_targets,
                    valid=self.valid,
                    valid_targets=self.valid_targets,
                    valid_after_n_steps=100)

        mean_pred, var_pred = model.predict(self.valid)

        # Negative log-likelihood
        valid_loss = self._neg_log_likelihood(self.valid_targets, mean_pred,
                                              var_pred)

        cost = time.time() - start

        return {
            'function_value': float(valid_loss),
            'cost': cost,
            'info': {
                'fidelity': fidelity
            }
        }
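A minimal usage sketch for this example. The enclosing benchmark class name (`BNNBenchmark`) and its constructor are assumptions for illustration; only the `objective_function` signature above is taken from the source.

# Hypothetical usage -- `BNNBenchmark` is an assumed name for the class
# that defines objective_function above; its constructor is not shown here.
import numpy as np

benchmark = BNNBenchmark(rng=1)  # assumed constructor

configuration = {
    'burn_in': 0.3,    # fraction of the budget spent on burn-in
    'n_units_1': 64,   # units in the first hidden layer
    'n_units_2': 64,   # units in the second hidden layer
    'l_rate': 1e-3,    # SGHMC learning rate
    'mdecay': 0.05,    # SGHMC momentum decay
}

result = benchmark.objective_function(configuration=configuration,
                                      fidelity={'budget': 1000},
                                      rng=np.random.RandomState(0))
print(result['function_value'], result['cost'], result['info']['fidelity'])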
Example #2
    def objective_function_test(self,
                                configuration: Union[Dict, CS.Configuration],
                                fidelity: Union[Dict, CS.Configuration,
                                                None] = None,
                                rng: Union[np.random.RandomState, int,
                                           None] = None,
                                **kwargs) -> Dict:
        """
        Trains a Bayesian neural network with 3 layers on the train and valid data splits and evaluates it on the test
        split.

        Parameters
        ----------
        configuration : Dict, CS.Configuration
            Configuration for the pyBNN model
        fidelity : Dict, CS.Configuration, None
            Fidelity parameters for the pyBNN model; check get_fidelity_space(). Uses the default (maximum) value
            if None.
            budget : int [500 - 10000]
                Number of epochs to train the model.

            Note: The fidelity here should be the maximum budget (= 10000). If this field is left empty, the
            maximum budget is used by default.
        rng : np.random.RandomState, int, None
            Random seed for benchmark. By default the class level random seed.

            To prevent overfitting on a single seed, it is possible to pass a
            parameter ``rng`` as 'int' or 'np.random.RandomState' to this function.
            If this parameter is not given, the default random state is used.
        kwargs

        Returns
        -------
        Dict -
            function_value : test loss
            cost : time to train and evaluate the model
            info : Dict
                fidelity : used fidelities in this evaluation
        """
        start = time.time()

        self.rng = rng_helper.get_rng(rng=rng, self_rng=self.rng)
        np.random.seed(self.rng.randint(1, 10000))

        # `burn_in_steps` must be at least 1; otherwise theano raises a RuntimeError. (The configuration space
        # actually allows zero as the lower limit for burn_in, in which case the number of steps is clamped to 1.)
        burn_in_steps = max(1,
                            int(configuration['burn_in'] * fidelity['budget']))

        net = partial(_get_net,
                      n_units_1=configuration['n_units_1'],
                      n_units_2=configuration['n_units_2'])

        model = BayesianNeuralNetwork(sampling_method="sghmc",
                                      get_net=net,
                                      l_rate=configuration['l_rate'],
                                      mdecay=configuration['mdecay'],
                                      burn_in=burn_in_steps,
                                      n_iters=fidelity['budget'],
                                      precondition=True,
                                      normalize_input=True,
                                      normalize_output=True,
                                      rng=self.rng)

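        # Merge the training and validation splits; the final model is trained
        # on both and evaluated on the held-out test split.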
        train = np.concatenate((self.train, self.valid))
        train_targets = np.concatenate(
            (self.train_targets, self.valid_targets))
        model.train(train, train_targets)

        mean_pred, var_pred = model.predict(self.test)
        test_loss = self._neg_log_likelihood(self.test_targets, mean_pred,
                                             var_pred)

        cost = time.time() - start
        return {
            'function_value': float(test_loss),
            'cost': cost,
            'info': {
                'fidelity': fidelity
            }
        }
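For the test evaluation, the same (assumed) benchmark object would be called with the maximum budget, as the docstring recommends. This continues the hypothetical sketch from Example #1; `benchmark` and `configuration` are the assumed objects defined there.

# Hypothetical usage, continuing the sketch above.
result = benchmark.objective_function_test(configuration=configuration,
                                           fidelity={'budget': 10000})
print(result['function_value'])  # negative log-likelihood on the test split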