Esempio n. 1
0
 def test_branin(self):
     """Verify that optimization locates the known Branin minimum."""
     result, best_value = fmin(branin,
                               x0=(0, 0),
                               xmin=(-5, 0),
                               xmax=(10, 15),
                               max_evaluations=5000)
     # fmin returns a one-entry mapping holding the 2-d float vector "x".
     self.assertEqual(len(result), 1)
     self.assertEqual(len(result["x"]), 2)
     # Known global minimum value of the Branin function.
     self.assertAlmostEqual(best_value, 0.397887, places=2)
Esempio n. 2
0
    def test_categorical_params(self):
        """Categorical parameters must reach the objective exactly as given."""

        def check(x_categorical):
            # With a single choice per parameter, the first evaluation
            # must receive exactly those choices.
            self.assertEqual(x_categorical, {"test1": "string", "test2": 2})
            return 1

        result, value = fmin(check,
                             x_categorical={"test1": ["string"], "test2": [2]},
                             max_evaluations=1)
Esempio n. 3
0
    def test_float_params(self):
        """The first evaluation must see the float start values x0."""

        def check(x):
            self.assertAlmostEqual(x[0], 3.0)
            self.assertAlmostEqual(x[1], 20.0)
            return 1

        result, value = fmin(check,
                             x0=(3.0, 20.0),
                             xmin=(-100, -100),
                             xmax=(100, 100),
                             max_evaluations=1)
Esempio n. 4
0
    def test_int_params(self):
        """Integer parameters must start at their x0_int defaults."""

        def check(x_int):
            self.assertEqual(x_int[0], 4)
            self.assertEqual(x_int[1], 19)
            return 1

        result, value = fmin(check,
                             x0_int=(4, 19),
                             xmin_int=(-100, -100),
                             xmax_int=(100, 100),
                             max_evaluations=1)
Esempio n. 5
0
    def test_categorical_params(self):
        """Categorical choices must be handed to the objective unchanged."""
        expected = {"test1": "string", "test2": 2}

        def check(x_categorical):
            self.assertEqual(x_categorical, expected)
            return 1

        result, value = fmin(check,
                             x_categorical={"test1": ["string"], "test2": [2]},
                             max_evaluations=1)
Esempio n. 6
0
    def test_float_params(self):
        """Float parameters must start at their x0 values on the first call."""

        def check(x):
            # Compare each component against the configured start value.
            for actual, expected in zip(x, (3., 20.)):
                self.assertAlmostEqual(actual, expected)
            return 1

        result, value = fmin(check, x0=(3., 20.), xmin=(-100, -100),
                             xmax=(100, 100), max_evaluations=1)
Esempio n. 7
0
    def test_custom_function_args(self):
        """Entries of custom_args must arrive as extra keyword arguments."""

        def check(x_int, custom_arg1, custom_arg2):
            self.assertEqual(custom_arg1, 1)
            self.assertEqual(custom_arg2, "some_string")
            return 1

        result, value = fmin(check, x0_int=(4, 19), xmin_int=(-100, -100),
                             xmax_int=(100, 100), max_evaluations=1,
                             custom_args={"custom_arg1": 1,
                                          "custom_arg2": "some_string"})
Esempio n. 8
0
    def test_custom_function_args(self):
        """custom_args must be forwarded to the objective by keyword."""
        extra = {"custom_arg1": 1, "custom_arg2": "some_string"}

        def check(x_int, custom_arg1, custom_arg2):
            self.assertEqual(custom_arg1, 1)
            self.assertEqual(custom_arg2, "some_string")
            return 1

        result, value = fmin(check,
                             x0_int=(4, 19),
                             xmin_int=(-100, -100),
                             xmax_int=(100, 100),
                             max_evaluations=1,
                             custom_args=extra)
 def optimize(self, num_evals=1000):
     """Tune friction parameters with SMAC.

     Runs `fmin` on `self._evaluate` starting from the configured start
     point and bounds, then scores the best setting on the held-out
     validation set.

     :param num_evals: maximum number of objective evaluations.
     :return: (best_params, training error, validation error) tuple.
     """
     start = self._get_start_value()
     lower = self._get_lower_bounds()
     upper = self._get_upper_bounds()

     best_params, error = fmin(self._evaluate, x0=start, xmin=lower,
                               xmax=upper, max_evaluations=num_evals)

     # Re-score the winning float vector on the validation split.
     validation_error = self._compute_error(best_params['x'],
                                            self._validation_set)
     return best_params, error, validation_error
Esempio n. 10
0
    def test_int_params(self):
        """
            We check that using int parameters work
            but comparing the expected and the actual
            default value.
        """
        def test_fun(x_int):
            self.assertEqual(x_int[0], 4)
            self.assertEqual(x_int[1], 19)
            return 1

        xmin, fval = fmin(test_fun,
                          x0_int=(4, 19),
                          xmin_int=(-100, -100),
                          xmax_int=(100, 100),
                          max_evaluations=1)
from sklearn import datasets
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier

from pysmac.optimize import fmin

# Fixed: `datasets` and `cross_val_score` were used below but never imported;
# the import block at the top of the file now provides both.
X, y = datasets.load_iris(return_X_y=True)


def objective(x):
    """SMAC objective: negated mean 5-fold CV accuracy of a classifier.

    :param x: float parameter vector; x[0] is the inverse regularization
        strength C.
    :return: negative mean accuracy (pysmac minimizes, so we negate).
    """
    # clf = SVC(C=x[0])
    clf = LogisticRegression(C=x[0], solver='lbfgs', multi_class='multinomial')
    return -1.0 * cross_val_score(clf, X, y, cv=5).mean()


# Search C in [0.001, 100] starting from 1.0.
parameters, score = fmin(objective=objective,
                         x0=[1.0],
                         xmin=[0.001],
                         xmax=[100.0],
                         max_evaluations=10)

# Alternative: tune tree-based classifiers over integer parameters.
# def objective(x_int):
#     #clf = DecisionTreeClassifier(min_samples_leaf=x_int[0], min_samples_split=x_int[1])
#     clf = RandomForestClassifier(max_depth=x_int[0], min_samples_leaf=x_int[1], min_samples_split=x_int[2])
#     return -1.0 * cross_val_score(clf, X, y, cv=5).mean()
#
# parameters, score = fmin(objective=objective,
#                          x0_int=[2, 50, 50],
#                          xmin_int=[2, 1, 2],
#                          xmax_int=[5, 100, 100],
#                          max_evaluations=20)

print('Lowest function value found: %f' % score)
Esempio n. 12
0
from pysmac.optimize import fmin

def objective(x_int, x_categorical, clf, X, y):
    """SMAC objective: negated mean CV accuracy of `clf` under given params.

    :param x_int: (max_features, min_samples_split, min_samples_leaf).
    :param x_categorical: max_depth / bootstrap / criterion choices.
    :param clf: estimator configured in place via set_params.
    :param X, y: training data passed through custom_args.
    """
    clf.set_params(max_depth=x_categorical["max_depth"],
                   bootstrap=x_categorical["bootstrap"],
                   criterion=x_categorical["criterion"],
                   max_features=x_int[0],
                   min_samples_split=x_int[1],
                   min_samples_leaf=x_int[2])
    # cross_val_score reports accuracy; negate because SMAC minimizes.
    return -1 * np.mean(cross_val_score(clf, X, y))


# Categorical search space for the tree ensemble.
categorical_params = {"max_depth": [3, None],
                      "bootstrap": [True, False],
                      "criterion": ["gini", "entropy"]}

start = time()
xmin, fval = fmin(objective,
                  x0_int=(3, 3, 3), xmin_int=(1, 1, 1), xmax_int=(10, 10, 10),
                  x_categorical=categorical_params,
                  max_evaluations=n_iter_search,
                  custom_args={"X": X, "y": y, "clf": clf})
print("SMAC took %.2f seconds for %d candidate parameter settings."
      % (time() - start, n_iter_search))
# Fixed: Python 2 print statement -> Python 3 print() call, consistent with
# the print() usage elsewhere in this file.
print("Best configuration found: ", xmin)
Esempio n. 13
0
        # NOTE(review): truncated fragment — the `def mk_model(x, x_int):`
        # header and the model constructor these keyword arguments belong to
        # are not visible in this chunk. Presumably x carries the float
        # parameters and x_int the integer ones — TODO confirm against the
        # full source.
        learning_rate=x[0],
        num_leaves=x_int[0],
        min_data_in_leaf=x_int[1],
        colsample_bytree=x[1],
        subsample=0.95,
        subsample_freq=1
    )

def objective(x, x_int):
    """Negated mean 5-fold CV MSE of the model built from (x, x_int)."""
    scores = cross_val_score(mk_model(x, x_int), X_train, y_train,
                             scoring='neg_mean_squared_error', cv=5)
    # cross_val_score yields negative MSE; negate so SMAC minimizes MSE.
    return -1.0 * scores.mean()

# Jointly search the float (learning_rate, colsample_bytree) and integer
# (num_leaves, min_data_in_leaf) parameters with SMAC.
parameters, score = fmin(
    objective=objective,
    x0=[0.2, 0.9], xmin=[0.001, 0.001], xmax=[0.5, 1.0],
    x0_int=[24, 24], xmin_int=[10, 10], xmax_int=[50, 50],
    max_evaluations=50)

print('Lowest function value found: %f' % score)
print('Parameter setting %s' % parameters)

# Refit the best configuration on the full training set, then write the
# Kaggle submission file.
model = mk_model(parameters['x'], parameters['x_int'])
model.fit(X_train, y_train)

submission = pd.DataFrame({'Id': id_test, 'SalePrice': model.predict(X_test)})
submission.to_csv('kaggle-houses-submission.csv', index=False)
Esempio n. 14
0
# Translate each R-side grid variable into the matching pysmac argument
# triple (start value, lower bound, upper bound).
grid_names = r['names(r_args_list$grid)']

for grid_name in grid_names:
    grid_var = grid[grid_name]
    var_type = grid_var['type']

    if var_type == 'continuous':
        cast, suffix = float, ''
    elif var_type == 'discrete':
        cast, suffix = int, '_int'
    elif var_type == 'categorical':
        raise Exception('Categorical not implemented yet. Use discrete type')
    else:
        raise Exception('No such type for grid var')

    # Continuous vars feed x0/xmin/xmax; discrete ones x0_int/xmin_int/xmax_int.
    pysmac_args['x0' + suffix].append(cast(grid_var['init']))
    pysmac_args['xmin' + suffix].append(cast(grid_var['min']))
    pysmac_args['xmax' + suffix].append(cast(grid_var['max']))

# Drop the R-side entries already consumed above, then merge the remaining
# R arguments into the pysmac keyword arguments.
r('r_args_list[c("grid", "objective", "init_rcode")] <- NULL')
pysmac_args.update(r['r_args_list'])  # merge the rest arguments

try:
    best_pars, objective_val = fmin(**pysmac_args)
    fill_objective_args(best_pars)
    # Fixed: Python 2 print statements -> Python 3 print() calls.
    print('%+%'.join([
        "pysmac>>",
        str(objective_val), r["paste(deparse(objective_args), collapse='')"]
    ]))
except Exception as e:
    # Best-effort bridge: report the failure to the R caller via stdout.
    print(e)