# ---- Example 1 ----
def create_optimizer(dim, fitness, n_step, n_init_sample, model_type):
    """Build a mipego optimizer over a 2-D continuous search space.

    Parameters
    ----------
    dim : int
        Problem dimensionality; sizes the GP hyper-parameter arrays and the
        random-forest `max_features`.  NOTE(review): the search space below
        is hard-coded to two variables — presumably dim == 2; confirm.
    fitness : callable
        Objective function passed straight through to mipego.
    n_step : int
        Maximum number of optimization iterations (`max_iter`).
    n_init_sample : int
        Size of the initial design.
    model_type : str
        Surrogate choice: 'GP', 'sklearn-RF' or 'R-RF'.

    Returns
    -------
    A configured `mipego` optimizer instance.

    Raises
    ------
    ValueError
        If `model_type` is not one of the supported names (previously this
        fell through with `model` unbound and crashed with a NameError).
    """
    x1 = {'name': "x1", 'type': 'R', 'bounds': [-6, 6]}
    x2 = {'name': "x2", 'type': 'R', 'bounds': [-6, 6]}
    search_space = [x1, x2]

    if model_type == 'GP':
        # Bug fix: `lb`/`ub` were undefined here (NameError).  Both search
        # variables share the same bounds, so take them from the search space.
        lb, ub = x1['bounds']
        # Length-scale search range and a random starting point within it.
        thetaL = 1e-3 * (ub - lb) * np.ones(dim)
        thetaU = 10 * (ub - lb) * np.ones(dim)
        theta0 = np.random.rand(dim) * (thetaU - thetaL) + thetaL

        model = GaussianProcess(regr='constant',
                                corr='matern',
                                theta0=theta0,
                                thetaL=thetaL,
                                thetaU=thetaU,
                                nugget=1e-5,
                                nugget_estim=False,
                                normalize=False,
                                verbose=False,
                                random_start=15 * dim,
                                random_state=None)

    elif model_type == 'sklearn-RF':
        # Leaf size scales with the initial design; never below one sample.
        min_samples_leaf = max(1, int(n_init_sample / 20.))
        max_features = int(np.ceil(dim * 5 / 6.))
        model = RandomForest(n_estimators=100,
                             max_features=max_features,
                             min_samples_leaf=min_samples_leaf)

    elif model_type == 'R-RF':
        model = RrandomForest()

    else:
        # Bug fix: an unknown model_type previously reached the mipego()
        # call with `model` unbound; fail fast with a clear message instead.
        raise ValueError("unknown model_type: %r" % (model_type,))

    opt = mipego(search_space,
                 fitness,
                 model,
                 max_iter=n_step,
                 random_seed=None,
                 n_init_sample=n_init_sample,
                 minimize=True,
                 optimizer='MIES')

    return opt
# ---- Example 2 ----
def create_optimizer(dim, fitness, n_step, n_init_sample, model_type):
    """Build a mipego optimizer over a mixed-integer search space.

    The space is two continuous dims in [-5, 5], one ordinal dim in
    [-100, 100], and one nominal dim with six categories.

    Parameters
    ----------
    dim : int
        Dimensionality used to size the random-forest `max_features`.
    fitness : callable
        Objective function passed straight through to mipego.
    n_step : int
        Maximum number of optimization iterations (`max_iter`).
    n_init_sample : int
        Size of the initial design.
    model_type : str
        Surrogate choice: 'sklearn-RF' or 'R-RF' ('GP' is unsupported).

    Returns
    -------
    A configured `mipego` optimizer instance.

    Raises
    ------
    NotImplementedError
        For model_type == 'GP' (the GP branch was disabled; previously it
        silently `pass`ed and crashed later with `model` unbound).
    ValueError
        For any other unrecognized model_type.
    """
    C = ContinuousSpace([-5, 5]) * 2
    I = OrdinalSpace([-100, 100])
    N = NominalSpace(['OK', 'A', 'B', 'C', 'D', 'E'])

    search_space = C * I * N
    # Category levels of the nominal dims; the forest surrogates need them.
    levels = search_space.levels

    if model_type == 'GP':
        # Bug fix: this branch used to `pass` (its body was commented out),
        # leaving `model` undefined and raising NameError at the mipego()
        # call below.  Fail fast with an explicit error instead.
        raise NotImplementedError(
            "the 'GP' surrogate is disabled for this mixed-integer example")

    elif model_type == 'sklearn-RF':
        min_samples_leaf = max(1, int(n_init_sample / 20.))
        max_features = int(np.ceil(dim * 5 / 6.))
        model = RandomForest(levels=levels, n_estimators=100,
                             max_features=max_features,
                             min_samples_leaf=min_samples_leaf)

    elif model_type == 'R-RF':
        min_samples_leaf = max(1, int(n_init_sample / 20.))
        max_features = int(np.ceil(dim * 5 / 6.))
        model = RrandomForest(levels=levels, n_estimators=100,
                              max_features=max_features,
                              min_samples_leaf=min_samples_leaf)

    else:
        raise ValueError("unknown model_type: %r" % (model_type,))

    opt = mipego(search_space, fitness, model, max_iter=n_step, random_seed=None,
                 n_init_sample=n_init_sample, minimize=True, optimizer='MIES')

    return opt
# use random forest as the surrogate model
#CHRIS two surrogate models are needed
# Single-objective run: one forest surrogate, MGFI infill, one point per
# iteration, results saved under 'data_msphere_one_point'.
# NOTE(review): `search_space`, `objective`, `n_step`, `n_init_sample`,
# `n_job` and `available_gpus` must be defined earlier in the file.
model = RandomForest(levels=search_space.levels, n_estimators=100)
opt = mipego(search_space,
             objective,
             model,
             ftarget=None,
             minimize=True,
             noisy=False,
             max_eval=None,
             max_iter=n_step,
             infill='MGFI',
             n_init_sample=n_init_sample,
             n_point=1,
             n_job=n_job,
             n_restart=None,
             max_infill_eval=None,
             wait_iter=3,
             optimizer='MIES',
             log_file=None,
             data_file=None,
             verbose=False,
             random_seed=None,
             available_gpus=available_gpus,
             bi=True,
             save_name='data_msphere_one_point')

incumbent, stop_dict = opt.run()
#print('incumbent #TODO_CHRIS makes no sense for now:')
#for x in incumbent:
    #try:
    #available_gpus.remove(5)
    #except:
    #pass
    # NOTE(review): the statements from here to the second opt.run() are
    # mangled — the enclosing loop/try above was commented out, leaving
    # these lines indented with no enclosing scope (IndentationError as
    # written).  Code left byte-identical; needs restructuring upstream.
    print(available_gpus)

    # Cap parallel jobs at 5, never below 1, bounded by available GPUs.
    n_job = max(min(5,len(available_gpus)),1)


    # use random forest as the surrogate model
    #CHRIS two surrogate models are needed
    # Bi-objective run (bi=True): separate surrogates — presumably one for
    # runtime and one for loss (TODO confirm) — with the HVI infill.
    time_model = RandomForest(levels=search_space.levels,n_estimators=100)
    loss_model = RandomForest(levels=search_space.levels,n_estimators=100)
    opt = mipego(search_space, objective, time_model, loss_model, ftarget=None,
                     minimize=True, noisy=False, max_eval=None, max_iter=n_step,
                     infill='HVI', n_init_sample=n_init_sample, n_point=1, n_job=n_job,
                     n_restart=None, max_infill_eval=None, wait_iter=3, optimizer='MIES',
                     log_file=None, data_file=None, verbose=False, random_seed=None,
                     available_gpus=available_gpus, bi=True,save_name='data_mbarrier_kayfeng_eps_var_alpha_mult_' + str(it),ref_time=None,ref_loss=None,hvi_alpha=0.1)

    #ref_time=1000.0,ref_loss=1000.0

    incumbent, stop_dict = opt.run()
#print('incumbent #TODO_CHRIS makes no sense for now:')
#for x in incumbent:
#    try:
#        print(str(x) + ':' + str(incumbent[x]))
#    except:
#        continue
#print ('stop_dict:')
#for x in stop_dict:
#    try:
# ---- Example 5 ----
# Mixed-integer search space: two continuous dims in [-5, 5], one ordinal
# dim in [-100, 100], and one nominal dim with six categories.
C = ContinuousSpace([-5, 5], 'C') * 2
I = OrdinalSpace([-100, 100], 'I')
N = NominalSpace(['OK', 'A', 'B', 'C', 'D', 'E'], 'N')
search_space = C * I * N

# Random-forest surrogate; the nominal levels tell it which dims are
# categorical.
model = RandomForest(levels=search_space.levels)

# EI infill, two points per iteration across two jobs on GPUs 1 and 2,
# warm-started from a previous run's JSON data file.
run_settings = dict(
    max_iter=n_step,
    random_seed=None,
    n_init_sample=n_init_sample,
    minimize=True,
    log_file="test.log",
    verbose=True,
    optimizer='MIES',
    infill='EI',
    available_gpus=[1, 2],
    n_job=2,
    n_point=2,
    warm_data_file="example_warm_data.json",
)
opt = mipego(search_space, obj_func, model, **run_settings)

opt.run()
# ---- Example 6 ----
# Second surrogate for the bi-objective run; `time_model`, `search_space`,
# `objective` and the remaining settings come from earlier in the file.
loss_model = RandomForest(levels=search_space.levels, n_estimators=10)

# Bi-objective configuration (bi=True): HVI infill, one candidate point
# per iteration, results saved under `save_name`.
settings = dict(
    ftarget=None,
    minimize=True,
    noisy=False,
    max_eval=None,
    max_iter=n_step,
    infill='HVI',
    n_init_sample=n_init_sample,
    n_point=1,
    n_job=n_job,
    n_restart=None,
    max_infill_eval=None,
    wait_iter=3,
    optimizer='MIES',
    log_file=None,
    data_file=None,
    verbose=False,
    random_seed=None,
    available_gpus=available_gpus,
    bi=True,
    save_name=save_name,
    ref_time=None,
    ref_loss=None,
    ignore_gpu=ignore_gpu,
    eval_epochs=eval_epochs,
    data_augmentation=True,
    use_validation=True,
)
opt = mipego(search_space, objective, time_model, loss_model, **settings)
# ---- Example 7 ----
        tmp = 0
    else:
        tmp = 1
    return np.sum(x_r**2.) + abs(x_i - 10) / 123. + tmp * 2.


# Search space: two continuous dims, one ordinal dim, one nominal dim.
C = ContinuousSpace([-5, 5], 'C') * 2
I = OrdinalSpace([-100, 100], 'I')
N = NominalSpace(['OK', 'A', 'B', 'C', 'D', 'E'], 'N')
search_space = C * I * N

# Forest surrogate; levels mark the nominal dims as categorical.
model = RandomForest(levels=search_space.levels)

# Parallel run: two points per iteration across two jobs on GPUs 1 and 2.
opt = mipego(
    search_space, obj_func, model,
    max_iter=n_step, random_seed=None,
    n_init_sample=n_init_sample,
    n_point=2, n_job=2,
    minimize=True,
    log_file="test.log", verbose=True,
    optimizer='MIES',
    available_gpus=[1, 2],
)

opt.run()
# ---- Example 8 ----
# Hyper-parameter search space for a network: three stack sizes, conv and
# dense activations, two boolean switches, four dropout rates, a learning
# rate and an L2 coefficient.
stack_sizes = OrdinalSpace([1, 5], 'stack') * 3
activation = NominalSpace(activation_fun_conv, "activation")
activation_dense = NominalSpace(activation_fun, "activ_dense")
step = NominalSpace([True, False], "step")
global_pooling = NominalSpace([True, False], "global_pooling")
drop_out = ContinuousSpace([1e-5, .9], 'dropout') * 4
lr_rate = ContinuousSpace([1e-4, 1.0e-0], 'lr')
l2_regularizer = ContinuousSpace([1e-5, 1e-2], 'l2')

# NOTE(review): `strides`, `filters` and `kernel_size` are not defined in
# this snippet — they must come from earlier in the file; verify.
search_space = (stack_sizes * strides * filters * kernel_size
                * activation * activation_dense * drop_out
                * lr_rate * l2_regularizer * step * global_pooling)


print('starting program...')
available_gpus = gp.getAvailable(limit=16)
print(available_gpus)


# use random forest as the surrogate model
model = RandomForest(levels=search_space.levels)
opt = mipego(
    search_space, objective, model,
    ftarget=None, minimize=True, noisy=False,
    max_eval=None, max_iter=n_step,
    infill='EI', n_init_sample=10, n_point=3, n_job=3,
    n_restart=None, max_infill_eval=None, wait_iter=3,
    optimizer='MIES', log_file=None, data_file=None,
    verbose=False, random_seed=None,
    available_gpus=available_gpus,
)


incumbent, stop_dict = opt.run()
print(incumbent, stop_dict)
# ---- Example 9 ----
    x_r, x_i, x_d = np.array([x['C_0'], x['C_1']]), x['I'], x['N']
    if x_d == 'OK':
        tmp = 0
    else:
        tmp = 1
    return np.sum(x_r**2.) + abs(x_i - 10) / 123. + tmp * 2.


# Search space: two continuous dims, one ordinal dim, one nominal dim.
C = ContinuousSpace([-5, 5], 'C') * 2
I = OrdinalSpace([-100, 100], 'I')
N = NominalSpace(['OK', 'A', 'B', 'C', 'D', 'E'], 'N')
search_space = C * I * N

# Forest surrogate over the mixed space.
model = RandomForest(levels=search_space.levels)

# Sequential run: one candidate point and one job per iteration.
opt = mipego(
    search_space, obj_func, model,
    max_iter=n_step, random_seed=None,
    n_init_sample=n_init_sample,
    n_point=1, n_job=1,
    minimize=True, verbose=True,
    optimizer='MIES',
)

opt.run()