Example #1
def tune_meta_learner():
    cs = build_configspace()
    # Evaluate the default configuration as a baseline.
    def_value = obj_function(cs.get_default_configuration())
    print("Default Value: %.2f" % (def_value))

    # Run Bayesian optimization over the configuration space.
    bo = BayesianOptimization(obj_function, cs, max_runs=50, time_limit_per_trial=1200)
    bo.run()
    # The incumbent holds the best configuration found and its objective value.
    inc_value = bo.get_incumbent()
    config = inc_value[0][0]

    print('Best hyperparameter config found', config)
    return config
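Here build_configspace() and obj_function() are defined elsewhere in the project. A minimal sketch of what they might look like, using the ConfigSpace library; the hyperparameter names, ranges and the toy objective below are illustrative assumptions, not the original code:

import numpy as np
from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter, UniformIntegerHyperparameter

def build_configspace():
    # Toy search space; the real meta-learner space is not shown in the example.
    cs = ConfigurationSpace()
    cs.add_hyperparameters([
        UniformFloatHyperparameter("learning_rate", 1e-4, 1e-1, log=True, default_value=1e-2),
        UniformIntegerHyperparameter("n_estimators", 50, 500, default_value=100),
    ])
    return cs

def obj_function(config):
    # The optimizer minimizes the returned value, so return an error-style metric.
    # This toy objective only illustrates the Configuration-based call convention.
    lr = config["learning_rate"]
    n_estimators = config["n_estimators"]
    return (np.log10(lr) + 2.0) ** 2 + 100.0 / n_estimators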
Example #2
def tune_meta_learner():
    cs = build_configspace()
    def_value = objective_function(cs.get_default_configuration())
    print("Default Value: %.2f" % (def_value))

    bo = BayesianOptimization(objective_function, cs, max_runs=50, time_limit_per_trial=150)
    bo.run()
    inc_value = bo.get_incumbent()
    config = inc_value[0][0]

    # meta_dir, meta_algo, metric and hash_id are module-level settings; pk is the pickle module.
    with open(meta_dir + 'meta_learner_%s_%s_%s_config.pkl' % (meta_algo, metric, hash_id), 'wb') as f:
        pk.dump(config, f)
    print('Best hyperparameter config found', config)
    return config
Example #3
def run(dataset_name):
    # mode, trial_num and data_dir are module-level settings of this script.
    file_id = '%s-resnet-%s-%d.pkl' % (dataset_name, mode, trial_num)
    saved_file = os.path.join(data_dir, file_id)

    # (x_train, y_train), (x_test, y_test), cls_num = load_dataset(dataset_name)

    def objective_function(cfg):
        (x_train, y_train), (x_test,
                             y_test), cls_num = load_dataset(dataset_name)
        epochs_num, run_count = get_default_setting(dataset_name)
        val_error = train(cls_num,
                          epochs_num,
                          cfg,
                          x_train,
                          y_train,
                          x_test,
                          y_test,
                          seed=32)
        print('the validation accuracy is ', 1 - val_error)

        # Persist the evaluation history: append [cfg, val_error] to saved_file.
        if not os.path.exists(saved_file):
            data = list()
        else:
            with open(saved_file, 'rb') as f:
                data = pickle.load(f)
        data.append([cfg, val_error])

        with open(saved_file, 'wb') as f:
            pickle.dump(data, f)
        return val_error

    cs = create_configspace()
    bo = BO(objective_function,
            cs,
            max_runs=trial_num,
            time_limit_per_trial=10000,
            sample_strategy=mode,
            rng=np.random.RandomState(1))
    bo.run()
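The objective_function above appends every evaluated [cfg, val_error] pair to saved_file. A minimal sketch of reading that history back and picking the best configuration, assuming the same pickle format:

import pickle

def best_from_history(saved_file):
    # Load the list of [cfg, val_error] pairs written by objective_function.
    with open(saved_file, 'rb') as f:
        history = pickle.load(f)
    # The pair with the smallest validation error is the best configuration found.
    best_cfg, best_err = min(history, key=lambda item: item[1])
    print('best validation accuracy:', 1 - best_err)
    return best_cfg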
Example #4
                # Remaining keyword arguments of the underlying scikit-learn random
                # forest estimator, built inside this wrapper's fit() method.
                min_impurity_decrease=self.min_impurity_decrease,
                random_state=self.random_state,
                n_jobs=self.n_jobs,
                class_weight=self.class_weight,
                warm_start=True)

        self.estimator.fit(X, y, sample_weight=sample_weight)
        return self

    def predict(self, X):
        if self.estimator is None:
            raise NotImplementedError()
        return self.estimator.predict(X)


dataset_list = dataset_str.split(',')
check_datasets(dataset_list)
cs = get_cs()

# Cap the number of trials at 75% of the (approximately) distinct configurations in the space.
_run_count = min(int(len(set(cs.sample_configuration(30000))) * 0.75), run_count)
print(_run_count)

for dataset in dataset_list:
    node = load_data(dataset, '../soln-ml/', True, task_type=0)
    _x, _y = node.data[0], node.data[1]
    obj_func = partial(eval_func, x=_x, y=_y)
    bo = BO(obj_func, cs, max_runs=_run_count, time_limit_per_trial=600, sample_strategy=mode, rng=np.random.RandomState(1))
    bo.run()
    with open('logs/%s-random_forest-%s-%d.pkl' % (dataset, mode, run_count), 'wb') as f:
        pickle.dump(bo.get_history().data, f)
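get_cs() and eval_func are project-specific helpers that are not shown here. A minimal sketch of a random-forest search space built with ConfigSpace; the hyperparameter names and ranges are illustrative assumptions:

from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter, UniformIntegerHyperparameter

def get_cs():
    # Illustrative random forest search space; the original ranges may differ.
    cs = ConfigurationSpace()
    cs.add_hyperparameters([
        UniformIntegerHyperparameter("n_estimators", 50, 500, default_value=100),
        UniformIntegerHyperparameter("max_depth", 2, 30, default_value=10),
        UniformIntegerHyperparameter("min_samples_split", 2, 20, default_value=2),
        UniformFloatHyperparameter("max_features", 0.1, 1.0, default_value=0.5),
    ])
    return cs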
Example #5
def test_branin():
    space_dict = {
        "parameters": {
            "x1": {
                "type": "float",
                "bound": [-5, 10],
                "default": 0
            },
            "x2": {
                "type": "float",
                "bound": [0, 15]
            },
        }
    }

    cs = get_config_space_from_dict(space_dict)
    print(cs)

    bo = BayesianOptimization(branin,
                              cs,
                              max_runs=30,
                              time_limit_per_trial=3,
                              logging_dir='logs')
    bo.run()
    inc_value = bo.get_incumbent()
    print('BO', '=' * 30)
    print(inc_value)

    # Evaluate the random search.
    bo = BayesianOptimization(branin,
                              cs,
                              max_runs=30,
                              time_limit_per_trial=3,
                              sample_strategy='random',
                              logging_dir='logs')
    bo.run()
    inc_value = bo.get_incumbent()
    print('RANDOM', '=' * 30)
    print(inc_value)

    # Evaluate batch BO with median imputation.
    bo = BatchBayesianOptimization(branin,
                                   cs,
                                   max_runs=10,
                                   batch_size=3,
                                   time_limit_per_trial=3,
                                   sample_strategy='median_imputation',
                                   logging_dir='logs')
    bo.run()
    inc_value = bo.get_incumbent()
    print('MEDIAN IMPUTATION BATCH BO', '=' * 30)
    print(inc_value)

    # Evaluate batch BO with local penalization.
    bo = BatchBayesianOptimization(branin,
                                   cs,
                                   max_runs=10,
                                   batch_size=3,
                                   time_limit_per_trial=3,
                                   sample_strategy='local_penalization',
                                   logging_dir='logs')
    bo.run()
    inc_value = bo.get_incumbent()
    print('LOCAL PENALIZATION BATCH BO', '=' * 30)
    print(inc_value)
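The branin objective itself is not shown above. A standard Branin implementation, adapted to the single-Configuration call convention used by these optimizers, could look like this:

import numpy as np

def branin(config):
    # Classic Branin function; its global minimum value is about 0.397887.
    x1, x2 = config['x1'], config['x2']
    a = 1.0
    b = 5.1 / (4 * np.pi ** 2)
    c = 5.0 / np.pi
    r = 6.0
    s = 10.0
    t = 1.0 / (8 * np.pi)
    return a * (x2 - b * x1 ** 2 + c * x1 - r) ** 2 + s * (1 - t) * np.cos(x1) + s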