Example 1
    def optimize(self, f, maximize=True, pmap=map):

        seed = self.seed if self.seed else random.randint(0, 9999)
        params = {'n_iterations': self.num_evals,
                  'random_seed': seed,
                  'n_iter_relearn': 3,
                  'verbose_level': 0}
        n_dimensions = len(self.lb)

        print('lb %s' % str(self.lb))
        print('ub %s' % str(self.ub))

        if maximize:
            def obj(args):
                kwargs = dict([(k, v) for k, v in zip(sorted(self.bounds.keys()), args)])
                return -f(**kwargs)

        else:
            def obj(args):
                kwargs = dict([(k, v) for k, v in zip(sorted(self.bounds.keys()), args)])
                return f(**kwargs)

        mvalue, x_out, error = bayesopt.optimize(obj, n_dimensions,
                                                 self.lb, self.ub, params)
        best = dict([(k, v) for k, v in zip(sorted(self.bounds.keys()), x_out)])
        return best, None
Example 2
    def optimize(self, f, maximize=True, pmap=map):

        seed = self.seed if self.seed else random.randint(0, 9999)
        params = {
            'n_iterations': self.num_evals,
            'random_seed': seed,
            'n_iter_relearn': 3,
            'verbose_level': 0
        }
        n_dimensions = len(self.lb)

        print('lb %s' % str(self.lb))
        print('ub %s' % str(self.ub))

        if maximize:

            def obj(args):
                kwargs = dict([
                    (k, v) for k, v in zip(sorted(self.bounds.keys()), args)
                ])
                return -f(**kwargs)

        else:

            def obj(args):
                kwargs = dict([
                    (k, v) for k, v in zip(sorted(self.bounds.keys()), args)
                ])
                return f(**kwargs)

        mvalue, x_out, error = bayesopt.optimize(obj, n_dimensions, self.lb,
                                                 self.ub, params)
        best = dict([(k, v)
                     for k, v in zip(sorted(self.bounds.keys()), x_out)])
        return best, None
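Examples 1 and 2 are the same solver method (the second merely reformatted): both build keyword arguments by pairing the sorted bounds keys with the positional vector BayesOpt proposes, and negate `f` when maximizing because the library minimizes its objective. The call they wrap is `bayesopt.optimize(objective, n_dimensions, lower_bound, upper_bound, params)`, which returns the tuple `(mvalue, x_out, error)`. Below is a minimal, self-contained sketch of that callback interface; the quadratic objective and the specific parameter values are illustrative assumptions, not code from the examples above.

import numpy as np
import bayesopt

def objective(x):
    # BayesOpt passes a 1D numpy array and minimizes the returned scalar.
    return float(np.sum((x - 0.3) ** 2))

params = {'n_iterations': 50,    # total evaluation budget
          'n_init_samples': 10,  # initial design points before the surrogate is used
          'n_iter_relearn': 5,   # re-learn kernel hyperparameters every 5 iterations
          'verbose_level': 0}

n = 3
lb = np.zeros((n,))
ub = np.ones((n,))

mvalue, x_out, error = bayesopt.optimize(objective, n, lb, ub, params)
print('best value %.4f at %s (error code %d)' % (mvalue, x_out, error))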
Example 3
def tune():
	"""Tunes hyperparameters of a feed forward net using Bayesian Optimization.

	Returns:
		mvalue: float. Best value of the cost function found using BayesOpt.
		x_out: 1D array. Best hyper-parameters found.
	"""
	params = {}
	params['n_iterations'] = 50
	params['n_iter_relearn'] = 1
	params['n_init_samples'] = 2

	print "*** Model Selection with BayesOpt ***"
	n = 6  # n dimensions
	# params: #layer, width, dropout, nonlinearity, l1_rate, l2_rate
	lb = np.array([1 , 10 , 0., 0., 0., 0.])
	ub = np.array([10, 500, 1., 1., 0., 0.])

	start = clock()
	mvalue, x_out, _ = bayesopt.optimize(cost_func, n, lb, ub, params)

	# Usage of BayesOpt with a discrete set of values for the hyper-parameters
	# (a runnable sketch of this variant follows this function).

	#layers = [1, 3, 5, 7, 9, 10]
	#hsizes = [10, 50, 100, 150, 200, 300]
	#drates = [0.0, .1, .3, .5, .7, .9]
	#x_set = np.array([layers, hsizes, drates], dtype=float).transpose()
	#mvalue, x_out, _ = bayesopt.optimize_discrete(cost_func, x_set, params)

	print "Result", mvalue, "at", x_out
	print "Running time:", clock() - start, "seconds"
	return mvalue, x_out
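The commented-out block in tune() above is the discrete variant mentioned in the forward note. Below is a hedged, self-contained sketch of that idea: `bayesopt.optimize_discrete(cost_func, x_set, params)` is called exactly as in the comments, but the stand-in `cost_func`, the use of `itertools.product` to enumerate every combination (the comments only pair the three lists element-wise), and the parameter values are illustrative assumptions, not code from the original script.

from itertools import product

import numpy as np
import bayesopt

def cost_func(x):
    # x is one row of x_set: (n_layers, hidden_size, dropout_rate).
    # A real cost function would train the network and return a validation loss;
    # this stand-in just prefers mid-sized nets with moderate dropout.
    n_layers, hsize, drate = x
    return abs(n_layers - 5) + abs(hsize - 150) / 100.0 + abs(drate - 0.3)

layers = [1, 3, 5, 7, 9, 10]
hsizes = [10, 50, 100, 150, 200, 300]
drates = [0.0, .1, .3, .5, .7, .9]

# One candidate point per row; the optimizer only ever proposes rows of this set.
x_set = np.array(list(product(layers, hsizes, drates)), dtype=float)

params = {'n_iterations': 50, 'n_iter_relearn': 1, 'n_init_samples': 2}
mvalue, x_out, _ = bayesopt.optimize_discrete(cost_func, x_set, params)
print("Result", mvalue, "at", x_out)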
Example 4
def tune(i, nonlin):
    params = {}
    params['n_iterations'] = 300
    params['n_iter_relearn'] = 1
    params['n_init_samples'] = 2
    
    print "*** Model Selection with BayesOpt ***"
    
    n = 6                   # n dimensions
    if (nonlin == theano.tensor.nnet.relu):
        lb = np.array([1, 10,  .0005, .0009, 0, i])
        ub = np.array([5, 300, .00121,    .001,  .5, i])
    else:
        lb = np.array([1, 10,  .0005, .0005, 0, i])
        ub = np.array([5, 300, .1,    .1,  .5, i])

    start = clock()
    mvalue, x_out, error = bayesopt.optimize(bo_costfunc, n, lb, ub, params)
    
    #mvalue, x_out, error = bayesopt.optimize_discrete(bo_costfunc, x_set, params)
    print "Result", mvalue, "at", x_out
    print "Running time:", clock() - start, "seconds"
    return mvalue, x_out, error
Example 5
# to a default value.
params = {}
params['n_iterations'] = 50
params['n_init_samples'] = 20
params['crit_name'] = "cSum(cEI,cDistance)"
params['crit_params'] = [1, 0.5]
params['kernel_name'] = "kMaternISO3"
print "Callback implementation"

n = 2  # n dimensions
lb = np.zeros((n, ))
ub = np.ones((n, ))

start = clock()

mvalue, x_out, error = bayesopt.optimize(testfunc, n, lb, ub, params)

print "Result", x_out
print "Seconds", clock() - start

print "OO implementation"
bo_test = BayesOptTest(n)
bo_test.parameters = params
bo_test.lower_bound = lb
bo_test.upper_bound = ub

start = clock()
mvalue, x_out, error = bo_test.optimize()

print "Result", x_out
print "Seconds", clock() - start
Example 6
    #print "target", target
    e = quad(x, target)
    return e


# Initialize the parameters by default
params = {}  #bayesopt.initialize_params()

# We decided to change some of them
params['n_init_samples'] = 30
params['n_iter_relearn'] = 20
params['noise'] = 1e-10
params['kernel_name'] = "kMaternISO5"
params['kernel_hp_mean'] = [1]
params['kernel_hp_std'] = [5]
params['surr_name'] = "sStudentTProcessNIG"
#params['crit_name'] = "cMI"

dim = 20
lb = np.ones((dim, )) * 0
ub = np.ones((dim, )) * 20

mvalue, x_out, error = bayesopt.optimize(func, dim, lb, ub, params)

print "Result", mvalue, x_out

print "Global optimal", 0, np.arange(1, 1 + dim)

print "Y Gap", mvalue
print "X Gap", math.sqrt(mvalue * dim)
Example 7
    def optimize(self):
        min_val, x_out, error = bo.optimize(self.evaluateSample, self.n_dim,
                                            self.lb, self.ub, self.params)

        return min_val, x_out, error
Example 8
# to a default value.
params = {} 
params['n_iterations'] = 50
params['n_init_samples'] = 20
params['crit_name'] = "cSum(cEI,cDistance)"
params['crit_params'] = [1, 0.5]
params['kernel_name'] = "kMaternISO3"
print "Callback implementation"

n = 2                     # n dimensions
lb = np.zeros((n,))
ub = np.ones((n,))

start = clock()

mvalue, x_out, error = bayesopt.optimize(testfunc, n, lb, ub, params)

print "Result", x_out
print "Seconds", clock() - start


print "OO implementation"
bo_test = BayesOptTest(n)
bo_test.parameters = params
bo_test.lower_bound = lb
bo_test.upper_bound = ub

start = clock()
mvalue, x_out, error = bo_test.optimize()

print "Result", x_out
Example 9

params = {}  #bayesopt.initialize_params()

# We decided to change some of them
params['n_iterations'] = 200
params['n_init_samples'] = 5
params['n_iter_relearn'] = 5
# params['l_type'] = "mcmc"
params['noise'] = 1
params['kernel_name'] = "kMaternARD5"
# params['kernel_hp_mean'] = [1]
# params['kernel_hp_std'] = [5]
# params['surr_name'] = "sStudentTProcessNIG"
# params['surr_name'] = "sStudentTProcessNIG"
params['load_save_flag'] = 2

dim = 7
lb = np.ones((dim, )) * 0.
ub = np.ones((dim, )) * 1.

mvalue, x_out, error = bayesopt.optimize(compute_marginals, dim, lb, ub,
                                         params)

params = np.array([
    3.0000, 0.0911, 0, 21.9868, 0, 22.3006, 0.2053, 0.7538, -1.4208, 0.8877,
    1.0000, 0, 0, 0
])
params = np.array([0.0911, 21.9868, 22.3006, 0.2053, 0.7538, -1.4208, 0.8877])

#python /Users/csmfindling/spearmint/spearmint/spearmint/main.py --driver=local --method=GPEIOptChooser config.pb
Example 10
        with open('/tmp/results.pkl', 'w') as f:
            data["kwargs"].append(kwargs)
            data["results"].append(result)
            pickle.dump(data, f)
        return result

    return wrapper


if __name__ == '__main__':
    with open('/tmp/data.pkl', 'r') as f:
        context = pickle.load(f)
        objfun = context['objfun']
        search = context['search']
        typemap = context['typemap']

    objfun = prepare_objfun(objfun, search)

    params = {}
    params['n_iterations'] = 150
#    params['n_iter_relearn'] = 5

    # configuration used throughout experiments
    # search={'logC': [-8, 1], 'logGamma': [-8, 1]}
    n = 2                     # n dimensions
    lb = -8 * np.ones((n,))
    ub = np.ones((n,))

    mvalue, x_out, error = bayesopt.optimize(objfun, n, lb, ub, params)
    print('mvalue: %1.3f' % mvalue)
Example 11
import traceback
from bayesian_tools import evaluate_old, evaluate_MT_linear

parser = argparse.ArgumentParser("")
parser.add_argument('-d', '--ndim', type=int, required=True)
parser.add_argument('--device', default="cpu")
parser.add_argument('-n', '--norm', action="store_true")
args = parser.parse_args()


def evaluate(vec):
    #return -evaluate_old(vec)
    return -evaluate_MT_linear(vec, norm=args.norm, device=args.device)


params = {}
params['n_iterations'] = 1000
params['n_iter_relearn'] = 5
params['n_init_samples'] = 2

n = args.ndim  # n dimensions
lb = numpy.zeros((n, ))
ub = numpy.ones((n, ))

if __name__ == "__main__":
    start = clock()
    mvalue, x_out, error = bayesopt.optimize(evaluate, n, lb, ub, params)

    print("Result", mvalue, "at", x_out)
    print("Running time:", clock() - start, "seconds")
Example 12
	'edge.fused(flat-map, filter)-group-and-aggregate-prepare.queueSize',
	'edge.group-and-aggregate-prepare-group-and-aggregate.queueSize',
	'edge.group-and-aggregate-mapSink(counts).queueSize'
]

# Function for testing.
def run_pipeline(Xin):
	params = ['/usr/bin/java', '-cp', 'wordcount-1.0-SNAPSHOT.jar', 'com.hazelcast.jet.WordCount']
	try:
		for i, x in enumerate(Xin.tolist()):
			params.append('%s=%d' % (options[i], x))
		start_time = time.time()
		subprocess.call(params, stderr=DEVNULL)
		elapsed_time = time.time() - start_time
		return elapsed_time
	except Exception as err:
		traceback.print_exc()
		pass

lower = np.array([1, 1, 1, 1, 1, 128, 128, 128, 128]).astype('double')
upper = np.array([36, 36, 36, 36, 36, 4096, 4096, 4096, 4096]).astype('double')

y_out, x_out, error = bayesopt.optimize(run_pipeline, len(options), lower, upper, {})



defaults = np.array([36, 36, 36, 36, 36, 1024, 1024, 1024, 1024])
print("Running with defaults: %s" % defaults)
print(run_pipeline(defaults))
print("Running with best outcome %s" % x_out)
print(run_pipeline(x_out))
Example 13
    def optimize(self):
        min_val, x_out, error = bo.optimize(self.evalfunc, self.n_dim,
                                            self.lower_bound, self.upper_bound,
                                            self.params)

        return min_val, x_out, error
Example 14
    def optimize(self):
        return bayesopt.optimize(self.train_valid, self.dim, self.lower_bound,
                                 self.upper_bound, self.hyperparams)
Example 15
    def run(self, problem):
        """Optimize the problem using your choice of Scipy optimizer.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """

        # Metadata Setup
        self.metadata = create_local_meta(None, "BayesOpt")
        self.iter_count = 0
        update_local_meta(self.metadata, (self.iter_count, ))

        # Initial Run
        with problem.root._dircontext:
            problem.root.solve_nonlinear(metadata=self.metadata)

        pmeta = self.get_desvar_metadata()
        self.params = list(pmeta)
        self.objs = list(self.get_objectives())
        con_meta = self.get_constraint_metadata()
        self.cons = list(con_meta)
        self.con_cache = self.get_constraints()

        self.opt_settings['disp'] = self.options['disp']

        bopt_params = {}
        bopt_params['n_iterations'] = self.options['n_iterations']
        bopt_params['n_inner_iterations'] = self.options['n_inner_iterations']
        bopt_params['n_iter_relearn'] = self.options['n_iter_relearn']
        bopt_params['n_init_samples'] = self.options['n_init_samples']
        bopt_params['noise'] = self.options['noise']
        bopt_params['surr_name'] = self.options['surr_name']

        # Size Problem
        nparam = 0
        for param in itervalues(pmeta):
            nparam += param['size']
        x_init = np.empty(nparam)
        i = 0

        # Initial Parameters
        lower_bounds = []
        upper_bounds = []

        for name, val in iteritems(self.get_desvars()):
            size = pmeta[name]['size']
            x_init[i:i + size] = val
            i += size

            # Bounds if our optimizer supports them
            meta_low = pmeta[name]['lower']
            meta_high = pmeta[name]['upper']
            for j in range(0, size):

                if isinstance(meta_low, np.ndarray):
                    p_low = meta_low[j]
                else:
                    p_low = meta_low

                if isinstance(meta_high, np.ndarray):
                    p_high = meta_high[j]
                else:
                    p_high = meta_high

                lower_bounds.append(p_low)
                upper_bounds.append(p_high)

        # optimize
        self._problem = problem

        min_value, xout, error = bayesopt.optimize(self._objfunc,
                                                   len(lower_bounds),
                                                   np.asarray(lower_bounds),
                                                   np.asarray(upper_bounds),
                                                   bopt_params)

        # Run one more iteration, at the computed minimum
        self._objfunc(xout)

        self._problem = None
        self.result = min_value  # TODO: what is this supposed to return?
        self.exit_flag = 1  # TODO: handle optimization failure?

        if self.options['disp']:
            print('Optimization Complete')
            print('-' * 35)
Example 16
'''

listas = []
param_solution = []
n_experimentos = 5

function = fun.PushButton(headless_mode=True,
                          variation=VARIATION)  # Initialization

# Coordinates for the avoid_obstacle task
# function.set_coords(coords=VARIATION)

for i in range(n_experimentos):
    print(i)
    function.clean_lists()
    mvalue, x_out, error = bayesopt.optimize(function.push_button, n, lb, ub,
                                             params)
    print("Result", mvalue, "at", x_out)
    listas_optimizacion = function.return_lists()
    listas.append(listas_optimizacion)
    param_solution.append(x_out)

pickle.dump(
    listas,
    open(TASK_DIR + "listas_bayesopt_" + TASK_NAME + "_" + VARIATION + ".p",
         "wb"))
pickle.dump(
    param_solution,
    open(TASK_DIR + "solucion_bayesopt_" + TASK_NAME + "_" + VARIATION + ".p",
         "wb"))

function.shutdown()  # Shutdown
Example 17
    target = np.arange(1, 1 + len(x))
    target2 = np.ones(len(x)) * 10
    # print "target", target
    e = quad(x, target)
    return e


# Initialize the parameters by default
params = bayesopt.initialize_params()

# We decided to change some of them
params["n_init_samples"] = 150
params["n_iter_relearn"] = 20
# params['noise'] = 0.01
params["kernel_name"] = "kMaternISO3"
params["kernel_hp_mean"] = [1]
params["kernel_hp_std"] = [5]
params["surr_name"] = "sStudentTProcessNIG"

dim = 20
lb = np.ones((dim,)) * 0
ub = np.ones((dim,)) * 20

mvalue, x_out, error = bayesopt.optimize(func, dim, lb, ub, params)

print "Result", mvalue, x_out

print "Global optimal", 0, np.arange(1, 1 + dim)

print "Distance", math.sqrt(mvalue * dim)
Example 18
    def optimize(self):
        min_val, x_out, error = bo.optimize(self.evaluateSample, self.n_dim,
                                            self.lb, self.ub,
                                            self.params)

        return min_val, x_out, error
Example 19
        with open('/tmp/results.pkl', 'w') as f:
            data["kwargs"].append(kwargs)
            data["results"].append(result)
            pickle.dump(data, f)
        return result

    return wrapper


if __name__ == '__main__':
    with open('/tmp/data.pkl', 'r') as f:
        context = pickle.load(f)
        objfun = context['objfun']
        search = context['search']
        typemap = context['typemap']

    objfun = prepare_objfun(objfun, search)

    params = {}
    params['n_iterations'] = 150
    #    params['n_iter_relearn'] = 5

    # configuration used throughout experiments
    # search={'logC': [-8, 1], 'logGamma': [-8, 1]}
    n = 2  # n dimensions
    lb = -8 * np.ones((n, ))
    ub = np.ones((n, ))

    mvalue, x_out, error = bayesopt.optimize(objfun, n, lb, ub, params)
    print('mvalue: %1.3f' % mvalue)
Example 20
import numpy as np
import bayesopt as bo
import RNN_train_wrapper as rtw
# from bayesoptmodule import BayesOptDiscrete

params = {}
# params['n_iterations'] = 50
# params['n_iter_relearn'] = 5
# params['n_init_samples'] = 2
dim = 4  # n dimensions
# lb = np.array([500.0,100.0,100.0,100.0])
# ub = np.array([2000.0,512.0,256.0,1000.0])
lb = np.ones((dim, )) * 0
ub = np.ones((dim, )) * 20

print "Callback implementation"

mvalue, x_out, error = bo.optimize(rtw.wrapper, dim, lb, ub, params)

print "Result", mvalue, "at", x_out