def plot_randomsearch():
    """Minimize the global objective ``f`` with optunity's random-search solver.

    Fix: the original picked ``optunity.available_solvers()[3]``, silently
    assuming the list order puts random search at index 3.  The solver list
    order is not a documented contract, so select the solver by name instead.
    """
    solver = 'random search'
    pars, details, _ = optunity.minimize(f, num_evals=100,
                                         x=[-5, 5], y=[-5, 5],
                                         solver_name=solver)
def optimize_objective(f):
    """Minimize ``f`` with every available optunity solver and overlay each
    solver's sampled points on a contour plot of the objective."""
    solver_names = optunity.available_solvers()
    call_logs = {}
    for name in solver_names:
        pars, details, _ = optunity.minimize(f, num_evals=100,
                                             x=[-5, 5], y=[-5, 5],
                                             solver_name=name)
        args = details.call_log['args']
        call_logs[name] = np.array([args['x'], args['y']])

    colors = ['r', 'g', 'b', 'y', 'k', 'y', 'r', 'g']
    markers = ['x', '+', 'o', 's', 'p', 'x', '+', 'o']

    # compute contours of the objective function on a dense grid
    delta = 0.025
    x = np.arange(-5.0, 5.0, delta)
    y = np.arange(-5.0, 5.0, delta)
    X, Y = np.meshgrid(x, y)
    contour_set = plt.contour(X, Y, f(X, Y))
    plt.clabel(contour_set, inline=1, fontsize=8, alpha=0.5)

    # one scatter layer per solver, cycling through colors/markers
    for idx, name in enumerate(solver_names):
        plt.scatter(call_logs[name][0, :], call_logs[name][1, :],
                    c=colors[idx], marker=markers[idx], alpha=0.80)

    plt.xlim([-5, 5])
    plt.ylim([-5, 5])
    plt.axis('equal')
    plt.legend(solver_names)
    plt.show()
def optimize(startin_point):
    """Run every available optunity solver on ``function_to_optimize`` inside
    a +/- 0.1 degree box around ``startin_point`` (a ``(lat, lon)`` pair),
    printing the result of each solver.

    Fix: the original used Python 2 ``print`` statements, which are syntax
    errors under Python 3; the rest of this file uses ``print()`` calls.
    """
    print("\nBegin optimization")
    midpoint = startin_point
    # search box: +/- 0.1 around the starting latitude/longitude
    constraints = {'lat': [midpoint[0] - 0.1, midpoint[0] + 0.1],
                   'lon': [midpoint[1] - 0.1, midpoint[1] + 0.1]}
    print("\tStarting from:\t\t", midpoint)
    print("\tCurrent distance:\t",
          function_to_optimize(midpoint[0], midpoint[1])[0])
    for sname in optunity.available_solvers():  # ['particle swarm', ...]
        # create a solver
        suggestion = optunity.suggest_solver(num_evals=500, solver_name=sname,
                                             **constraints)
        solver = optunity.make_solver(**suggestion)
        # optimize the function
        optimum = optunity.optimize(solver, function_to_optimize,
                                    maximize=False, max_evals=100)
        print("\n\t===================================")
        print("\tSolver name:\t", suggestion['solver_name'])
        print("\tMidpoint:\t", [optimum[0]['lat'], optimum[0]['lon']])
        print("\tDistance:\t", optimum[1][0][0])
        print("\tIterations:\t", optimum[1][1]['num_evals'])
print(info.optimum)
# keep only configuration entries that were actually set
# (idiom fix: dict comprehension instead of dict([...]) over a list comp)
solution = {k: v for k, v in optimal_configuration.items() if v is not None}
print('Solution\n========')
print("\n".join(map(lambda x: "%s \t %s" % (x[0], str(x[1])),
                    solution.items())))


# basic optim
def create_objective_function():
    """Return a random paraboloid ``(x - x0)**2 + (y - y0)**2`` whose
    minimum ``(x0, y0)`` is drawn uniformly from ``[0, 1) x [0, 1)``."""
    xoff = random.random()
    yoff = random.random()

    def f(x, y):
        return (x - xoff) ** 2 + (y - yoff) ** 2
    return f


solvers = optunity.available_solvers()
print('Available solvers: ' + ', '.join(solvers))

# minimize the random objective with every solver and record the points
# each solver evaluated
f = create_objective_function()
logs = {}
for solver in solvers:
    pars, details, _ = optunity.minimize(f, num_evals=100, x=[-5, 5],
                                         y=[-5, 5], solver_name=solver)
    logs[solver] = np.array([details.call_log['args']['x'],
                             details.call_log['args']['y']])
#!/usr/bin/env python
# A simple smoke test for all available solvers.
import optunity


def f(x, y):
    return x + y


for solver_name in optunity.available_solvers():
    # simple API
    best, _, _ = optunity.maximize(f, 100, x=[0, 5], y=[-5, 5],
                                   solver_name=solver_name)

    # expert API
    suggestion = optunity.suggest_solver(num_evals=100, x=[0, 5], y=[-5, 5],
                                         solver_name=solver_name)
    configured = optunity.make_solver(**suggestion)
    # without parallel evaluations
    best, _ = optunity.optimize(configured, f)
    # with parallel evaluations
    best, _ = optunity.optimize(configured, f, pmap=optunity.pmap)
def main():
    """CLI entry point: parse arguments and run an optunity-driven coil
    position/orientation optimization for a single subject.

    Fixes vs. original:
    - ``argparse.ArgumentParsers`` -> ``argparse.ArgumentParser`` (typo:
      the class is ``ArgumentParser``; the original raised AttributeError).
    - removed the duplicated ``output_file`` positional argument.
    - ``parser.parse()`` -> ``parser.parse_args()`` (no ``parse`` method).
    - ``--solver`` takes a string choice (its choices and default value
      "particle swarm" are strings), not ``type=int``.
    """
    parser = argparse.ArgumentParser(
        description="Run grid optimization on a single subject")
    parser.add_argument('msh', type=str,
                        help="Subject Gmsh .msh realistic head model")
    parser.add_argument('weights', type=str,
                        help=".npy binary containing a weight for each "
                        "tetrahedron")
    parser.add_argument('centroid', type=str,
                        help="Coordinates in T1w space for a centroid "
                        "to the weight function to optimize over")
    parser.add_argument('coil', type=str, help="Path to SimNIBS coil file")
    parser.add_argument('output_file', type=str,
                        help="Output file storing optimal coordinates")
    parser.add_argument('--history', type=str,
                        help="Output file to store history of scores"
                        " into for convergence/visualization")
    parser.add_argument('--workdir', type=str,
                        help="Working directory to run simulations in")
    parser.add_argument('--ncpus', type=int,
                        help="Number of threads to use for each batch "
                        "of simulations. Default = 8")
    parser.add_argument('--batchsize', type=int,
                        help="Number of simulations to run simultaneously, "
                        "will default to half the number of cpus if not "
                        "specified.")
    parser.add_argument('--solver', type=str,
                        help="Optunity solver to use, "
                        "defaults to particle swarm",
                        choices=optunity.available_solvers())
    args = parser.parse_args()

    msh = args.msh
    wf = np.load(args.weights)
    centroid = np.genfromtxt(args.centroid)
    coil = args.coil
    ncpus = args.ncpus or 8
    batch_size = args.batchsize or (ncpus // 2 - 1)
    history = args.history
    workdir = args.workdir or "/tmp/"
    output_file = args.output_file
    solver = args.solver or "particle swarm"

    # Construct objective function object
    f = FieldFunc(mesh_file=msh, initial_centroid=centroid, tet_weights=wf,
                  coil=coil, field_dir=workdir, cpus=ncpus)

    # Set up optunity optimization
    # Can we feed a list of inputs here?
    pars, details, _ = optunity.minimize(f.evaluate, num_evals=100,
                                         x=[f.bounds[0, 0], f.bounds[0, 1]],
                                         y=[f.bounds[1, 0], f.bounds[1, 1]],
                                         theta=[0, 180],
                                         solver_name=solver)