Example #1

import pickle
import multiprocessing
import numpy as np

with open('dataset.pickle', 'wb') as f:
    pickle.dump(dataset, f)

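# Main BO loop: refit the model on the current dataset each iteration and report the incumbent best point.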
for ii in range(iteration):
    print(
        '********************************************************************')
    print('iteration', ii)
    model = BO(dataset, bounds, bfgs_iter, debug=False)
    best_x = model.best_x
    best_y = model.best_y
    print('best_x', best_x)
    print('best_y', best_y)

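    # Predict the model at the evaluation inputs X_star.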
    y_pred, y_var = model.predict(X_star)
    train_x = dataset['train_x']
    train_y = dataset['train_y']
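    # Map the normalized training inputs (in [-0.5, 0.5]) back to the original bounds.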
    train_x_real = train_x * (bounds[0, 1] - bounds[0, 0]) + (bounds[0, 1] +
                                                              bounds[0, 0]) / 2

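    # p candidate starting points are handled per batch (capped at 5).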
    p = np.minimum(int(K / 5), 5)

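    # Worker: locally optimize the wEI acquisition from a given start and return the point with its wEI value.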
    def task(x0):
        x0 = model.optimize_wEI(x0)
        wEI_tmp = model.calc_wEI(x0)
        return x0, wEI_tmp

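    # Pool of 5 worker processes used to run task() on the candidate starts.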
    pool = multiprocessing.Pool(processes=5)
    x0_list = []
    for i in range(int(K / p)):
Example #2

import numpy as np

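# Model hyper-parameters read from the configuration dict.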
bfgs_iter = conf['max_iter']
num_layers = conf['num_layers']
layer_sizes = conf['layer_size']
activations = conf['activation']
l1 = conf['l1']
l2 = conf['l2']
scale = conf['scale']

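# Print the shape of each array in the dataset.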
for i in dataset.keys():
    print(i, dataset[i].shape)
tmp = np.copy(dataset)

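# Construct the BO model with the hyper-parameters read above.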
model = BO(name,
           num_models,
           dataset,
           bfgs_iter=bfgs_iter,
           debug=False,
           scale=scale,
           num_layers=num_layers,
           layer_sizes=layer_sizes,
           activations=activations,
           l1=l1,
           l2=l2)
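# Predict on the test inputs and compare against the ground-truth outputs.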
pys, ps2s = model.predict(dataset['test_x'])
print('pys', pys.T)
print('ps2s', ps2s.T)
print('true', dataset['test_y'])
delta = pys.T - dataset['test_y']
print('delta', delta)
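# Note: np.dot(delta, delta.T) is the sum of squared errors, not a mean.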
print('MSE', np.dot(delta, delta.T))