Example #1
0
    def auto_tune(self,
                  X,
                  y,
                  num_evals=50,
                  num_folds=2,
                  opt_metric="r_squared",
                  nprocs=1):
        """Search ``self.search`` for the hyperparameter configuration that
        optimizes *opt_metric* using optunity.

        :param X: feature matrix forwarded to the cross-validated objective
        :param y: targets forwarded to the cross-validated objective
        :param num_evals: number of optunity evaluations to perform
        :param num_folds: cross-validation folds used by the objective
        :param opt_metric: "r_squared" (maximized) or "mse" (minimized)
        :param nprocs: worker processes for parallel evaluation; -1 uses
            all available CPUs, 1 runs serially
        :return: the best hyperparameter configuration found
        :raises ValueError: if *opt_metric* is not a supported metric name
        """
        if nprocs == -1:
            nprocs = mp.cpu_count()
        if nprocs != 1:
            pmap = optunity.parallel.create_pmap(nprocs)
        else:
            # Fall back to optunity's own default (serial) map function.
            pmap = inspect.signature(
                optunity.minimize_structured).parameters["pmap"].default

        if opt_metric == "r_squared":
            # r_squared: higher is better, so maximize.
            optimal_configuration, info, _ = optunity.maximize_structured(
                partial(self._eval_factory(num_folds, r_squared), X, y),
                search_space=self.search,
                num_evals=num_evals,
                pmap=pmap)
        elif opt_metric == "mse":
            # mse: lower is better, so minimize.
            optimal_configuration, info, _ = optunity.minimize_structured(
                partial(self._eval_factory(num_folds, mse), X, y),
                search_space=self.search,
                num_evals=num_evals,
                pmap=pmap)
        else:
            # BUG FIX: previously an unrecognized metric fell through both
            # branches and crashed with NameError on optimal_configuration;
            # fail fast with a clear message instead.
            raise ValueError("unsupported opt_metric: %r" % (opt_metric,))

        return optimal_configuration
Example #2
0
 def execute_solver(self, searchspace):
     """Run optunity's structured minimizer over *searchspace*.

     Stores the best configuration found in ``self.best``.

     :param searchspace: converted hyperparameter space for optunity
     :raises BrokenPipeError: wrapping any exception raised inside optunity
     """
     LOG.debug("execute_solver using solution space:\n\n\t{}\n".format(
         pformat(searchspace)))
     try:
         self.best, _, _ = optunity.minimize_structured(
             f=self.loss_function,
             num_evals=self.max_iterations,
             search_space=searchspace)
     except Exception as e:
         LOG.error(
             "internal error in optunity.minimize_structured occured. {}".
             format(e))
         # Chain the original exception so its traceback is preserved for
         # debugging instead of being discarded by the re-raise.
         raise BrokenPipeError(
             "internal error in optunity.minimize_structured occured. {}".
             format(e)) from e
Example #3
0
    def execute_solver(self, searchspace):
        """
        This function is called immediately after convert_searchspace and get the output of the latter as input. It's
        purpose is to call the solver libs main optimization function.

        :param searchspace: converted hyperparameter space
        :raises BrokenPipeError: wrapping any exception raised inside optunity
        """
        LOG.debug("execute_solver using solution space:\n\n\t{}\n".format(pformat(searchspace)))
        try:
            self.best, _, _ = optunity.minimize_structured(f=self.loss_function,
                                                           num_evals=self.max_iterations,
                                                           search_space=searchspace)
        except Exception as e:
            LOG.error("internal error in optunity.minimize_structured occured. {}".format(e))
            # Chain the original exception so its traceback is preserved for
            # debugging instead of being discarded by the re-raise.
            raise BrokenPipeError("internal error in optunity.minimize_structured occured. {}".format(e)) from e
Example #4
0
    def optimize(self,
                 data,
                 search_space,
                 val_data=None,
                 num_evals=50,
                 optimize='max',
                 solver_name='particle swarm'):
        """Tune hyperparameters with optunity and return the best model.

        Parameters:
        -----------
        data: [X, Y] - arrays
            Used for cross-validated training (honors 'train_test_split').

        search_space: dict
            Parameters to optimize, e.g. 'units' : [100,1000].

        val_data: [X, Y] - arrays
            Optional. When given, the optimizer metric is evaluated on it
            and 'train_test_split' is ignored.

        num_evals: int
            Number of optunity optimizer iterations.

        optimize: str
            Either 'max' or 'min'.

        solver_name: str
            Default 'particle swarm'. Only the default is supported now.
        """
        train_manager = self._create_train_manager(data, val_data,
                                                   search_space)
        sys.setrecursionlimit(100000)
        # Dispatch on optimization direction instead of branching twice.
        solvers = {
            'max': opt.maximize_structured,
            'min': opt.minimize_structured,
        }
        if optimize not in solvers:
            raise InvalidParamError('optimize', optimize)
        self.retr, self.extra, self.info = solvers[optimize](
            f=train_manager.train,
            num_evals=num_evals,
            search_space=search_space)
        # Load and return the best model found during the search.
        return train_manager.get_best_model(self.extra)
def compute_mse_all_tuned(x_train, y_train, x_test, y_test):
    """Computes MSE of an SVR with tuned kernel and hyperparameters.

    Tunes over the module-level ``space`` (kernel family plus per-kernel
    parameters) with 2x5-fold cross-validation, then refits on the full
    training set and scores on the held-out test set.

    :param x_train: training features
    :param y_train: training targets
    :param x_test: held-out features
    :param y_test: held-out targets
    :return: mean squared error of the tuned model on the test data
    """

    # define objective function for tuning; optunity supplies the fold
    # splits for x_train/y_train/x_test/y_test via the decorator.
    @optunity.cross_validated(x=x_train, y=y_train, num_iter=2, num_folds=5)
    def tune_cv(x_train, y_train, x_test, y_test, kernel, C, gamma, degree,
                coef0):
        if kernel == 'linear':
            model = sklearn.svm.SVR(kernel=kernel, C=C)
        elif kernel == 'poly':
            model = sklearn.svm.SVR(kernel=kernel,
                                    C=C,
                                    degree=degree,
                                    coef0=coef0)
        elif kernel == 'rbf':
            model = sklearn.svm.SVR(kernel=kernel, C=C, gamma=gamma)
        else:
            # BUG FIX: ArgumentError is not defined here (raising it would
            # crash with NameError); raise ValueError with the same message.
            raise ValueError("Unknown kernel function: %s" % kernel)
        model.fit(x_train, y_train)

        predictions = model.predict(x_test)
        return optunity.metrics.mse(y_test, predictions)

    # optimize parameters
    optimal_pars, _, _ = optunity.minimize_structured(tune_cv,
                                                      num_evals=150,
                                                      search_space=space)

    # remove hyperparameters with None value from optimal pars; parameters
    # of the non-chosen kernel branches come back as None from optunity.
    for k, v in list(optimal_pars.items()):
        if v is None:
            del optimal_pars[k]
    print("optimal hyperparameters: " + str(optimal_pars))

    tuned_model = sklearn.svm.SVR(**optimal_pars).fit(x_train, y_train)
    predictions = tuned_model.predict(x_test)
    return optunity.metrics.mse(y_test, predictions)
Example #6
0
    'alpha': [0.001, 1],
    'num_hidden_layers': {
        'one_layer': {
            'n_hidden_1': [1, 128]
        },
        'two_layers': {
            'n_hidden_1': [1, 128],
            'n_hidden_2': [1, 128]
        },
        'three_layers': {
            'n_hidden_1': [1, 128],
            'n_hidden_2': [1, 128],
            'n_hidden_3': [1, 128]
        }
    }
}

#main process
if __name__ == "__main__":

    #options are classification, regression and unsupervised learning
    #pmap=optunity.pmap
    # Minimize the MLP training objective over the structured search space.
    # NOTE(review): train_model_mlp, space_classification and save_data are
    # presumably defined earlier in this file -- not visible here; confirm.
    hps, info, _2 = optunity.minimize_structured(train_model_mlp,
                                                 space_classification,
                                                 num_evals=5)

    # Python 2 print statements: report the best configuration found,
    # the optimizer's run statistics, then persist both.
    print "optimised vals: ", hps  # results
    print "optimisation information: ", info.stats
    print "saving data..."
    save_data(hps, info)
Example #7
0
            n_hidden_list.append(int(n_hidden_3))

    # train and fit model
    model = MLPClassifier(solver='lbfgs',
                          alpha=alpha,
                          hidden_layer_sizes=n_hidden_list,
                          random_state=1)
    model.fit(x_train, y_train)
    score = model.score(x_test, y_test)  #standard score model

    #print "Iteration Number: ", iteration_counter
    #print "score: ", score
    #print "params: ", alpha, n_hidden_1, n_hidden_2, n_hidden_3
    return score


#MAIN MODEL

# Python 2 script tail: run the structured minimizer over the classification
# search space in parallel (optunity.pmap) and save the results.
try:
    hps, info, _2 = optunity.minimize_structured(train_model,
                                                 space_classification,
                                                 num_evals=max_evals,
                                                 pmap=optunity.pmap)
    #hps = 1
    print("Hyperparameter Optimization Successful")
    # NOTE(review): in Python 2 the %-formatting applies to the string
    # before printing; in Python 3 this line would fail (print returns None).
    print("Convergence achieved in %d iterations") % max_evals
except Exception, e:
    print("Hyperparameter Optimization Failed: "), e
#print "saving data..."
# NOTE(review): if the try block failed, hps/info are unbound here and this
# call raises NameError -- confirm whether save should be inside the try.
save_data(hps, info)