Example #1
def opt(func, bound, niter=30):
    ''' Optimize a function within the given bounds; returns [min, max]. '''

    stepsize = None
    center = []

    for interval in bound:
        center.append((interval[0] + interval[1]) / 2.0)
        width = interval[1] - interval[0]

        if stepsize is None or width < stepsize:
            stepsize = width

    minimum = optimize.basinhopping(func,
        center, minimizer_kwargs=dict(method='L-BFGS-B', bounds=bound),
        niter=niter, stepsize=stepsize).fun

    maximum = -optimize.basinhopping(lambda x: -func(x),
        center, minimizer_kwargs=dict(method='L-BFGS-B', bounds=bound),
        niter=niter, stepsize=stepsize).fun

    # scipy bug: the doc says basinhopping returns a Result object,
    # but I sometimes get a float, sometimes a numpy float, and sometimes a numpy array
    minimum = float(minimum)
    maximum = float(maximum)

    return [minimum, maximum]
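A quick usage sketch of opt (assuming `import numpy as np` and `from scipy import optimize` are in scope, which the function itself relies on; the objective below is illustrative):

import numpy as np
from scipy import optimize

def f(x):
    # The range of sin(x) on [-3, 3] is roughly [-1, 1].
    return float(np.sin(x[0]))

print(opt(f, [(-3.0, 3.0)]))  # expect approximately [-1.0, 1.0]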
Example #2
File: solutions.py Project: tkchris93/ACME
def prob2():
    """Explore the documentation on the function scipy.optimize.basinhopping()
    online or via IPython. Use it to find the global minimum of the multimin()
    function given in the lab, with initial point x0 = np.array([-2, -2]) and
    the Nelder-Mead algorithm. Try it first with stepsize=0.5, then with
    stepsize=0.2.

    Return the minimum value of the function with stepsize=0.2.
    Print a statement answering the following question:
        Why doesn't scipy.optimize.basinhopping() find the minimum the second
        time (with stepsize=0.2)?
    """
    def multimin(x):
        r = np.sqrt((x[0]+1)**2 + x[1]**2)
        return r**2 *(1+ np.sin(4*r)**2)
    
    x0 = np.array([-2,-2])
    correct_x = opt.basinhopping(multimin, x0, stepsize=0.5, 
                        minimizer_kwargs={'method':'nelder-mead'}).x
    
    wrong_x = opt.basinhopping(multimin, x0, stepsize=0.2, 
                        minimizer_kwargs={'method':'nelder-mead'}).x
    
    output_string = """
    When stepsize=0.2, the step is not large enough to jump out of the 
    basin. Therefore, we get the incorrect answer.
    """
    return multimin(wrong_x)
Example #3
 def minimize_negative_acquisition(self, gpreg):
     # minimization of negative acquisition function
     vals, par = [], []
     x0 = list(self.rand.uniform(np.array(self.bounds).T[0], np.array(self.bounds).T[1],
                                 size=(self.n_iters_aqui - 1, self.dim)
                                 )) + [self.x[int(np.argmax(self.y))]]
     for x in x0:
         if self.acquisition_func == "EI":
             if self.bashop:
                 opti = basinhopping(expected_improvement, x0=x,
                                     minimizer_kwargs={"method": "L-BFGS-B",
                                                       "bounds": self.bounds,
                                                       "args": (self.aquis_par[-1],
                                                                np.max(self.y),
                                                                gpreg, self.dim,)})
             else:
                 opti = minimize(expected_improvement, x0=x, method="L-BFGS-B",
                                 args=(self.aquis_par[-1], np.max(self.y),
                                       gpreg, self.dim,),
                                 bounds=self.bounds)
         else:
             if self.bashop:
                 opti = basinhopping(upper_confidence_bound, x0=x,
                                     minimizer_kwargs={"method": "L-BFGS-B",
                                                       "bounds": self.bounds,
                                                       "args": (self.aquis_par[-1],
                                                                gpreg, self.dim,)})
             else:
                 opti = minimize(upper_confidence_bound, x0=x, method="L-BFGS-B",
                                 args=(self.aquis_par[-1], gpreg, self.dim,),
                                 bounds=self.bounds)
         par.append(opti.x)
         vals.append(opti.fun)
     return np.array(vals), np.array(par)
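The acquisition functions expected_improvement and upper_confidence_bound are defined elsewhere in this project. A minimal sketch of a negative expected improvement with the same call signature, assuming gpreg is an sklearn-style GaussianProcessRegressor (the project's actual implementation may differ):

import numpy as np
from scipy.stats import norm

def expected_improvement(x, aquis_par, y_max, gpreg, dim):
    # Posterior mean and standard deviation at the query point.
    mu, sigma = gpreg.predict(np.asarray(x).reshape(1, dim), return_std=True)
    mu, sigma = mu.item(), sigma.item()
    if sigma == 0.0:
        return 0.0
    z = (mu - y_max - aquis_par) / sigma
    ei = (mu - y_max - aquis_par) * norm.cdf(z) + sigma * norm.pdf(z)
    return -ei  # negated so minimize()/basinhopping() effectively maximize EI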
Example #4
    def bench_run(self, **minimizer_kwargs):
        """
        do an optimization test starting at x0 for all the optimizers
        """
        kwargs = self.minimizer_kwargs

        if hasattr(self.function, "temperature"):
            kwargs["T"] = self.function.temperature
        if hasattr(self.function, "stepsize"):
            kwargs["stepsize"] = self.function.stepsize
        minimizer_kwargs = {"method": "L-BFGS-B"}
        x0 = self.get_random_configuration()

        # basinhopping - with gradient
        if hasattr(self.function, 'der'):
            minimizer_kwargs['jac'] = True
            t0 = time.time()
            res = basinhopping(
                self.energy_gradient, x0, accept_test=self.accept_test,
                callback=self.stop_criterion, niter=1000,
                minimizer_kwargs=minimizer_kwargs,
                **kwargs)
            t1 = time.time()
            res.success = True
            if not self.found_target(res):
                res.success = False
            self.add_result(res, t1 - t0, 'basinhopping')

        # basinhopping - no gradient
        x0 = self.get_random_configuration()
        minimizer_kwargs['jac'] = False
        t0 = time.time()

        res = basinhopping(
            self.fun, x0, accept_test=self.accept_test,
            callback=self.stop_criterion, niter=1000,
            minimizer_kwargs=minimizer_kwargs,
            **kwargs)

        t1 = time.time()
        res.success = True
        if not self.found_target(res):
            res.success = False
        self.add_result(res, t1 - t0, 'basinhopping - no gradient')

        # differential_evolution
        t0 = time.time()

        res = differential_evolution(self.fun,
                                     self.bounds,
                                     popsize=20,
                                     polish=True)

        t1 = time.time()
        if not self.found_target(res):
            res.success = False
        self.add_result(res, t1 - t0, 'differential_evolution')
Example #5
def prob3():
    """Explore the documentation on the function scipy.optimize.basinhopping()
    online or via IPython. Use it to find the global minimum of the multimin()
    function given in the lab, with initial point x0 = np.array([-2, -2]) and
    the Nelder-Mead algorithm. Try it first with stepsize=0.5, then with
    stepsize=0.2. 

    Plot the multimin function and minima found using the code provided.
    Print statements answering the following questions:
        Which algorithms fail to find the global minimum?
        Why do these algorithms fail?

    Finally, return the global minimum.
    """
    # Define the function to be optimized and the initial condition.
    def multimin(x):
        r = np.sqrt((x[0]+1)**2 + x[1]**2)
        return r**2 *(1+ np.sin(4*r)**2)
    x0 = np.array([-2, -1.9])
    small_step = .2
    large_step = .5

    # Optimize using variations on Nelder-Mead.  NOTE: Here, each result is stored
    # separately for ease of plotting the differently colored minima.
    small = opt.basinhopping(multimin, x0, stepsize=small_step,
                            minimizer_kwargs={'method':'nelder-mead'})
    large = opt.basinhopping(multimin, x0, stepsize=large_step,
                            minimizer_kwargs={'method':'nelder-mead'})

    # Print the results.
    print("Stepsize:\t{}\nMinimum:\t{}\nX-Values:\t{}\n".format(small_step, 
                                                     small['fun'], small['x']))
    print("Stepsize:\t{}\nMinimum:\t{}\nX-Values:\t{}\n".format(large_step, 
                                                     large['fun'], large['x']))

    # Plot the multimin graph. Here, the points are colored differently for emphasis.
    xdomain = np.linspace(-3.5,1.5,70)
    ydomain = np.linspace(-2.5,2.5,60)
    X,Y = np.meshgrid(xdomain,ydomain)
    Z = multimin((X,Y))
    fig = plt.figure()
    ax1 = fig.add_subplot(111, projection='3d')
    ax1.plot_wireframe(X, Y, Z, linewidth=.5, color='c')
    ax1.scatter(x0[0], x0[1], multimin(x0), c='b')               # Initial pt: blue

    # Plot the results of the algorithms.
    ax1.scatter(small.x[0], small.x[1], small.fun, s=30, c='r')  # Small step: red
    ax1.scatter(large.x[0], large.x[1], large.fun, s=30, c='g')  # Large step: green
    plt.show()

    # Answer the problem questions.
    print("minimize() fails because it gets trapped in a basin.")
    print("0.2 fails because it is too small a stepsize to escape a basin.")

    # Return the correct global minimum.
    return large['fun']
Example #6
 def test_pass_accept_test(self):
     # test passing a custom accept test
     # makes sure it's being used and ensures all the possible return values
     # are accepted.
     accept_test = MyAcceptTest()
     i = 1
     # there's no point in running it more than a few steps.
     basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
                  niter=10, disp=self.disp, accept_test=accept_test)
     assert_(accept_test.been_called)
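MyAcceptTest is defined elsewhere in the test module. Since basinhopping's accept_test hook may return True, False, or the string "force accept", a minimal stand-in consistent with this test could be:

import random

class MyAcceptTest:
    def __init__(self):
        self.been_called = False

    def __call__(self, **kwargs):
        # kwargs carries f_new, x_new, f_old and x_old.
        self.been_called = True
        # Exercise every return value that basinhopping understands.
        return random.choice([True, False, "force accept"])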
Example #7
def optimize_basin_hopping_multiple(crater):
	# return optimize_basin_hopping(crater)
	# crater = craters[0]
	x0 = [crater.x,crater.y,crater.hs,crater.D2,crater.hr,crater.aRepose,crater.lema,crater.lemp]
	# crater = craters[1]
	x1 = [crater.hs,crater.hr,crater.aRepose]
	x2 = [crater.hs,crater.hr,crater.aRepose]
	x3 = [crater.hs,crater.hr,crater.aRepose]
	angles = [-1,2.141592,4]
	print "DOing multiple"
	basinhopping(craterXY_multiple,np.hstack([x0,x1,x2,x3]),minimizer_kwargs={"method":"nelder-mead"},niter=1)
Example #8
    def _partial_optimize(self, optimize_nodes, evaluate_nodes, fall_to_simplex=True, minimizer='Powell', use_basin=False, debug=False, minimizer_kwargs=None, basin_kwargs=None):
        """Optimize part of the model.

        :Arguments:
            nodes : iterable
                list nodes to optimize.
        """
        if minimizer_kwargs is None:
            minimizer_kwargs = {}
        if basin_kwargs is None:
            basin_kwargs = {}

        non_observeds = filter(lambda x: not x.observed, optimize_nodes)

        init_vals = [node.value for node in non_observeds]

        # define function to be optimized
        def opt(values):
            if debug: print(values)
            for value, node in zip(values, optimize_nodes):
                node.set_value(value)
            try:
                logp_optimize = [node.logp for node in optimize_nodes]
                logp_evaluate = [node.logp for node in evaluate_nodes]
                neglogp = -np.sum(logp_optimize) - np.sum(logp_evaluate)
                if debug: print(neglogp)
                return neglogp
            except pm.ZeroProbability:
                if debug: print('Outside support!')
                return np.inf

        # optimize
        if use_basin:
            try:
                minimizer_kwargs_passed = {'method': minimizer, 'options': minimizer_kwargs}
                basinhopping(opt, init_vals, minimizer_kwargs=minimizer_kwargs_passed, **basin_kwargs)
            except Exception:
                if fall_to_simplex:
                    print("Warning: Powell optimization failed. Falling back to simplex.")
                    minimizer_kwargs_passed = {'method': 'Nelder-Mead', 'options': minimizer_kwargs}
                    basinhopping(opt, init_vals, minimizer_kwargs=minimizer_kwargs_passed, **basin_kwargs)
                else:
                    raise
        else:
            try:
                minimize(opt, init_vals, method=minimizer, options=minimizer_kwargs)
            except Exception:
                if fall_to_simplex:
                    print("Warning: Powell optimization failed. Falling back to simplex.")
                    minimize(opt, init_vals, method='Nelder-Mead', options=minimizer_kwargs)
                else:
                    raise
Example #9
 def optimize(self):
     mybounds = MyBounds(self._lower, self._upper)
     basinhopping(
         self._funcwrapped, self._xinit,
         minimizer_kwargs={
             'method': 'L-BFGS-B',
             'bounds': [
                 x for x in zip(self._lower, self._upper)
                 ]
             },
         accept_test=mybounds,
         niter=MAX_IT,
     )
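The MyBounds class is not shown. A bounds-based accept_test in the pattern of the scipy documentation, matching the (lower, upper) constructor used above (a sketch; the real class may differ):

import numpy as np

class MyBounds:
    def __init__(self, lower, upper):
        self.lower = np.asarray(lower)
        self.upper = np.asarray(upper)

    def __call__(self, **kwargs):
        x = kwargs["x_new"]
        # Accept the hop only if every coordinate stays inside the box.
        return bool(np.all(x >= self.lower) and np.all(x <= self.upper))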
Example #10
def fit_gaussian_basis(x,y,angmom):
  nregrid=400
  xfit=np.linspace(1e-5,12.0,nregrid)
  yfit=scipy.interpolate.griddata(np.array(x),np.array(y),xfit,method='cubic')
  
  yfit=np.array(yfit)/np.array(xfit)**(angmom+1)
  
  
  errfunc=lambda p,x,y: sum_gauss(p,x)-y 
  objfunc=lambda p,x,y: np.sum(errfunc(p,x,y)**2/len(x))
  for ng in range(4,10):
    p0=[]
    for i in range(ng):
      p0.append(0.2)
    for i in range(ng):
      p0.append(0.5*2**i)
    #p1,cov,infodict,mesg,success=optimize.leastsq(errfunc,p0[:],args=(xfit,yfit),full_output=True)
    optres=optimize.basinhopping(objfunc,p0[:],
                                 disp=True,
                                 minimizer_kwargs={
                                    'args':(xfit,yfit),
                                    'options':{'disp':True}  },
                                    stepsize=2.0
                                )
    p1=optres.x
    coeffnorm=coeff_normalization(np.array(p1[ng:]),angmom)
    print("angular momentum",angmom)
    for i in range(ng):
      print(i+1,p1[i+ng],p1[i]/coeffnorm[i])
    #print('rms',np.sqrt(np.sum(infodict['fvec']**2)/len(infodict['fvec'])),'ng',ng)
    print('rms',np.sqrt(optres.fun))
Example #11
    def maximize(self):
        """
        Maximizes the given acquisition function.

        Returns
        -------
        np.ndarray(N,D)
            Point with highest acquisition value.
        """
        cand = np.zeros([self.n_restarts, self.X_lower.shape[0]])
        cand_vals = np.zeros([self.n_restarts])

        f = partial(self._acquisition_fkt_wrapper, acq_f=self.objective_func)

        for i in range(self.n_restarts):
            start = np.array([self.rng.uniform(self.X_lower,
                                                self.X_upper,
                                                self.X_lower.shape[0])])
            res = optimize.basinhopping(
                f,
                start,
                minimizer_kwargs={
                    "bounds": zip(
                        self.X_lower,
                        self.X_upper),
                    "method": "L-BFGS-B"},
                disp=self.verbosity)

            cand[i] = res.x
            cand_vals[i] = res.fun
        best = np.argmax(cand_vals)
        return np.array([cand[best]])
Example #12
def scipyBasinhopping(recipe, method='L-BFGS-B', *args, **kwargs):
    # new in scipy 0.12
    from scipy.optimize import basinhopping
    print "Fit using scipy's basin hopping optimizer"
    mybounds = MyBounds(recipe)
    mystep = MyRandomDisplacement(recipe)
    minimizer_kwargs = {'method': method,
                        # 'bounds': recipe.getBounds(),
                        'bounds': recipe.getBoundsFlat(),
                        'options': {'maxiter': 300},
                        }
    if 'maxiter' in kwargs:
        minimizer_kwargs['options'] = {'maxiter': kwargs['maxiter']}

    bh_kwargs = {'take_step': mystep,
                 'accept_test': mybounds}
    if 'callback' in kwargs:
        bh_kwargs['callback'] = kwargs['callback']
    if 'maxxint' in kwargs:
        bh_kwargs['niter'] = kwargs['maxxint']
    else:
        bh_kwargs['niter'] = 20

    res = basinhopping(recipe.scalarResidual, recipe.getValues(),
                       minimizer_kwargs=minimizer_kwargs,
                       **bh_kwargs)
    return {'x': res['x'],
            'raw': res}
Example #13
 def optimize(self,niter=100):
     """
     Optimize kernel's hyperparameters using basin hopping.
     Args:
         niter: iterations in basin hopping function.
     Returns: optimized covariance parameters for kernel.
     """
     def optimized_marginal_likelihood(cov_args):
         """
         Negative log marginal likelihood to be minimized.
         if cov_args is out of bounds specified by kernel return inf
         """
         in_bounds=True
         for i in range(len(cov_args)):
             lower=self.kernel.cov_bounds[i][0]
             upper=self.kernel.cov_bounds[i][1]
             if cov_args[i]<lower or cov_args[i]>upper:
                 in_bounds=False
         if not in_bounds:
             return float('inf')
         else:
             cov_matrix=np.matrix([[self.kernel.cov(xm,xn,cov_args) for xm in self.X] for xn in self.X])+10**-9*np.identity(len(self.X))
         return -1*np.log(multivariate_normal(self.mean_vector,cov_matrix).pdf(self.Y))
     opt=basinhopping(optimized_marginal_likelihood,self.kernel.cov_args,niter=niter).x
     self.kernel.update_args(list(opt))
Example #14
File: minimizers.py Project: tBuLi/symfit
    def execute(self, **minimize_options):
        """
        Execute the basin-hopping minimization.

        :param minimize_options: options to be passed on to
            :func:`scipy.optimize.basinhopping`.
        :return: :class:`symfit.core.fit_results.FitResults`
        """
        if 'minimizer_kwargs' not in minimize_options:
            minimize_options['minimizer_kwargs'] = {}

        if 'method' not in minimize_options['minimizer_kwargs']:
            # If no minimizer was set by the user upon execute, use local_minimizer
            minimize_options['minimizer_kwargs']['method'] = self.local_minimizer.method_name()
        if 'jac' not in minimize_options['minimizer_kwargs'] and isinstance(self.local_minimizer, GradientMinimizer):
            # Assign the jacobian
            minimize_options['minimizer_kwargs']['jac'] = self.local_minimizer.wrapped_jacobian
        if 'constraints' not in minimize_options['minimizer_kwargs'] and isinstance(self.local_minimizer, ConstrainedMinimizer):
            # Assign constraints
            minimize_options['minimizer_kwargs']['constraints'] = self.local_minimizer.wrapped_constraints
        if 'bounds' not in minimize_options['minimizer_kwargs'] and isinstance(self.local_minimizer, BoundedMinimizer):
            # Assign bounds
            minimize_options['minimizer_kwargs']['bounds'] = self.local_minimizer.bounds

        ans = basinhopping(
            self.objective,
            self.initial_guesses,
            **minimize_options
        )
        return self._pack_output(ans)
Example #15
def eq_ellipse_calc(cutout,weight,userNSuccess):
    
    mytakestep = MyTakeStep()
    
    global nSuccess_basinhopping
    nSuccess_basinhopping = userNSuccess
    
    global minVals_basinhopping
    minVals_basinhopping = np.empty(nSuccess_basinhopping)
    minVals_basinhopping[:] = np.nan
    
    global successCount_basinhopping
    successCount_basinhopping = 0
    
    #maxDist = max(np.hypot(*cutout.boundary.xy))
    
    minimizer_kwargs = {"args": (cutout,weight,#maxDist,
                                 ), "method": 'BFGS' }
    x0 = np.array([0,0,1,1,0])

    output = basinhopping(ellipse_in_cutout,x0,
                          niter=1000,minimizer_kwargs=minimizer_kwargs,
                          take_step=mytakestep, callback=print_fun)

    return output
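MyTakeStep, ellipse_in_cutout and print_fun are defined elsewhere. A custom step-taker for basinhopping is any callable x -> x'; if it exposes a stepsize attribute, basinhopping adapts it. A hypothetical sketch for the five ellipse parameters above:

import numpy as np

class MyTakeStep:
    def __init__(self, stepsize=0.5):
        self.stepsize = stepsize  # basinhopping tunes this attribute if present
        self.rng = np.random.default_rng()

    def __call__(self, x):
        # Perturb every parameter by a uniform step of width stepsize.
        return x + self.rng.uniform(-self.stepsize, self.stepsize, size=x.shape)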
Example #16
 def __tune__(self):
     if self.minimizer == Minimizer.DifferentialEvolution:
         bounds = [
             self.spectralRadiusBound,
             self.inputScalingBound,
             self.reservoirScalingBound,
             self.leakingRateBound,
         ]
         result = optimize.differential_evolution(self.__reservoirTrain__, bounds=bounds)
         print("The Optimization results are :" + str(result))
         return result.x[0], result.x[1], result.x[2], result.x[3]
     else:
         bounds = [
             self.spectralRadiusBound,
             self.inputScalingBound,
             self.reservoirScalingBound,
             self.leakingRateBound,
         ]
         minimizer_kwargs = {"method": "TNC", "bounds": bounds, "options": {"eps": 0.005}}
         mytakestep = ParameterStep()
         result = optimize.basinhopping(
             self.__reservoirTrain__,
             x0=self.initialGuess,
             minimizer_kwargs=minimizer_kwargs,
             take_step=mytakestep,
             stepsize=0.005,
         )
         print("The Optimization results are :" + str(result))
         return result.x[0], result.x[1], result.x[2], result.x[3]
Example #17
def main():
    x0 = 10.0* np.random.random(6)- 5.0

    mybounds = MyBounds()
    minimizer_kwargs = {"method":"L-BFGS-B"}
 
    ret=optimize.basinhopping(rastrigin, x0, niter=500, 
                        minimizer_kwargs=minimizer_kwargs,
                        callback=print_fun,
                        accept_test=mybounds)
    print(ret.x, ret.fun)
    print(ret)
    print(xiteration[499])
    
    fig = plt.figure()
    xp=[]
    for i in range(0,len(xminima)):
        xp.append(np.linalg.norm(xminima[i]))
    
   
    plt.plot(xp, ".")
    plt.savefig("sin.png")  # save before show(), otherwise the saved figure is blank
    plt.show()
    
    fig = plt.figure()
    plt.plot(fminima, ".")
    plt.show()
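print_fun, MyBounds, rastrigin, xiteration, xminima and fminima are not shown in this snippet. The basinhopping callback receives (x, f, accepted) after every hop; a sketch of a recording callback consistent with the plotting code above (list names mirror the snippet, otherwise hypothetical):

import numpy as np

xminima, fminima, xiteration = [], [], []

def print_fun(x, f, accepted):
    # Record every candidate minimum so it can be plotted later.
    xminima.append(np.copy(x))
    fminima.append(f)
    xiteration.append(np.copy(x))
    print("at minimum %.4f accepted %d" % (f, int(accepted)))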
Example #18
 def test_2d_nograd(self):
     # test 2d minimizations without gradient
     i = 1
     res = basinhopping(
         func2d_nograd, self.x0[i], minimizer_kwargs=self.kwargs_nograd, niter=self.niter, disp=self.disp
     )
     assert_almost_equal(res.x, self.sol[i], self.tol)
Example #19
File: sgoop.py Project: tlhr/plumology
    def fit(self, niter: int=1000) -> Tuple[float, np.ndarray]:
        """
        Optimize the spectral gap using basin hopping.

        Parameters
        ----------
        niter : Number of iterations to use.

        Returns
        -------
        coeffs : The found ideal coefficients.

        """
        dim = self._data.shape[1]
        rs = np.ones((dim,))
        rs *= 1 / np.sqrt((rs ** 2).sum())
        result = basinhopping(
            func=self._score,
            x0=rs,
            niter=niter,
            minimizer_kwargs=dict(
                method='L-BFGS-B',
                bounds=[(0.0001, 1.) for _ in range(dim)]
            ),
            stepsize=0.1,
            T=2.5
        )
        self.coeffs = result['x']
        return -result['fun'], self.coeffs
Example #20
def call_bh_prelim_params(trans_data):
    '''This function calls the basinhopping algorithm to minimize the sum of the distances
        between the data points projected onto an arbitrary plane, and the center of mass
        of the projected points
        Parameters
            trans_data - data roughly translated to the origin
    '''
    
    minimizer_kwargs = {"method": "L-BFGS-B", "args": trans_data, "bounds": ((0,2*pi),(0,2*pi))}
    
    x0 = [pi, pi]
    
    ret = optimize.basinhopping(prelim_params_test, x0, minimizer_kwargs=minimizer_kwargs, niter = 200)
    print("Preliminary parameters minimization: x = [%.4f, %.4f], epsilon = %.4f" %\
          (ret.x[0], ret.x[1], ret.fun))

    z = array([cos(ret.x[0])*sin(ret.x[1]), sin(ret.x[0])*sin(ret.x[1]), cos(ret.x[1])])

    epsilon = ret.fun

    n = size(trans_data[0])

    r_guess = sqrt(epsilon / n )         # average distance from COM
    beta_guess = pi - arctan2(-z[0],z[2])
    alpha_guess = arctan2(z[1], sqrt((z[0])**2 + (z[2])**2))
    print('Initial guess for alpha, beta and r from preliminary parameter test:')
    print('alpha = %.4f' %alpha_guess)
    print('beta = %.4f' %beta_guess)
    print('r = %.4f' %r_guess)

    return r_guess, beta_guess, alpha_guess, z
Example #21
def BasinHop(funcToMinimize,x0,boundsBasin,method="TNC",disp=False,
             interval=10,niter=30,niter_success=10,T=1,stepsize=0.001,
             ftol=1e-3,xtol=1e-3,gtol=1e-3):
    """
    Returns the result of basin hopping, given the arguments:

    Args:
        funcToMinimize: function to minimize, should take in the parameters
        as described by scipy's basinhopping routine
    
        x0: initial guesses, one per minimizer
    
        boundsBasin: the bounds, same size as x0. open ends of intervals are 
        None

        all others: consult basinhopping function
    """
    # the minimizer itself (for each 'basin') takes keywords
    # here, we are a little less 'picky' about the function tolerances
    # than before
    minimizer_kwargs = dict(method=method,bounds=boundsBasin,
                            options=dict(ftol=ftol,xtol=xtol,gtol=gtol))
    # use basin-hopping to get a solid guess of where we should  start
    obj = basinhopping(funcToMinimize,x0=x0,disp=disp,T=T,
                       stepsize=stepsize,minimizer_kwargs=minimizer_kwargs,
                       niter_success=niter_success,interval=interval,
                       niter=niter)
    return obj
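A usage sketch of BasinHop on a bounded quadratic (the objective and bounds are illustrative):

import numpy as np

obj = BasinHop(lambda x: (x[0] - 1.0) ** 2 + x[1] ** 2,
               x0=np.array([0.0, 0.0]),
               boundsBasin=[(-5, 5), (-5, 5)])
print(obj.x, obj.fun)  # expect x close to [1, 0]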
Example #22
 def test_2d(self):
     # test 2d minimizations with gradient
     i = 1
     res = basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
                        niter=self.niter, disp=self.disp)
     assert_almost_equal(res.x, self.sol[i], self.tol)
     self.assertTrue(res.nfev > 0)
Example #23
File: optimize.py Project: alchemyst/scipy
    def run_basinhopping(self):
        """
        Do an optimization run for basinhopping
        """
        kwargs = self.minimizer_kwargs
        if hasattr(self.function, "temperature"):
            kwargs["T"] = self.function.temperature
        if hasattr(self.function, "stepsize"):
            kwargs["stepsize"] = self.function.stepsize

        minimizer_kwargs = {"method": "L-BFGS-B"}

        x0 = self.function.initial_vector()

        # basinhopping - no gradient
        minimizer_kwargs['jac'] = False
        self.function.nfev = 0

        t0 = time.time()

        res = basinhopping(
            self.fun, x0, accept_test=self.accept_test,
            minimizer_kwargs=minimizer_kwargs,
            **kwargs)

        t1 = time.time()
        res.success = self.function.success(res.x)
        res.nfev = self.function.nfev
        self.add_result(res, t1 - t0, 'basinh.')
Example #24
def cv():
  width = 10 * 24 # predict 10 days into future
  tests = 20
  starts = numpy.random.randint(width, len(train) - width, tests)
  
  for start in starts:
    cv_train = train.iloc[:start]
    cv_test = train.iloc[start:start + width]
    nbsm = NonlinearBikeShareModel(cv_train)
    res = optimize.basinhopping(
      nbsm,
      nbsm.beta0(),
      minimizer_kwargs={
        'method':'L-BFGS-B',
        'jac':True,
        'bounds': nbsm.bounds(),
        'options': {'disp': False, 'maxiter': 500}
      },
      disp=True,
      niter=2
    )
    print('error:', nbsm(res.x)[0])

    plt.plot_date(cv_train['dates'], cv_train['count'], label='train')
    plt.plot_date(cv_train['dates'], nbsm.count(res.x), label='train fit')
    plt.plot_date(cv_test['dates'], cv_test['count'], label='test')
    plt.plot_date(cv_test['dates'], nbsm.predict(cv_test, res.x), label='predicted')
    plt.legend()
    plt.show()
    plt.clf()
Example #25
File: outline.py Project: jthacker/jtmri
def register_masked(ref_mask, img_mask, niter=None, x0=None):
    """Register img_mask to ref_mask, assumes inputs are contour masks
    Args:
        ref_mask -- reference image
        img_mask -- image to register to reference
        niter    -- number of iterations
        x0       -- starting value for transform (dx, dy, dsx, dsy)

    Returns: (image, result)
        image  -- img warped to the reference
        result -- RegistrationResult containing the fitted skimage AffineTransform
    """
    assert ref_mask.ndim == img_mask.ndim == 2
    if niter is None:
        niter = 100
    x0 = x0 or (0, 0, 1, 1)
    ny, nx = ref_mask.shape
    img_mask_resized = skimage.transform.resize(img_mask, ref_mask.shape)
    minimizer_kwargs = dict(method='COBYLA')
    res = basinhopping(CostFunc(ref_mask, img_mask_resized),
                       x0=x0,
                       niter=niter,
                       minimizer_kwargs=minimizer_kwargs)
    tr = _x_to_affine(res.x)
    img_mask_warped = resize_and_warp_mask(img_mask, ref_mask.shape, tr)
    reg_res = RegistrationResult(tr, ref_mask, img_mask, img_mask_warped, res)
    return img_mask_warped, reg_res
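CostFunc, _x_to_affine and resize_and_warp_mask are defined elsewhere in this module. A sketch of the cost callable, assuming x = (dx, dy, dsx, dsy) parameterizes a scale-plus-translation affine transform and the cost is the pixelwise mismatch between the warped mask and the reference:

import numpy as np
import skimage.transform

def _x_to_affine(x):
    dx, dy, dsx, dsy = x
    return skimage.transform.AffineTransform(scale=(dsx, dsy),
                                             translation=(dx, dy))

class CostFunc:
    def __init__(self, ref_mask, img_mask):
        self.ref_mask = ref_mask.astype(float)
        self.img_mask = img_mask.astype(float)

    def __call__(self, x):
        # Warp the moving mask and measure disagreement with the reference.
        warped = skimage.transform.warp(self.img_mask, _x_to_affine(x).inverse)
        return float(np.abs(warped - self.ref_mask).sum())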
Example #26
def minimize(lipids, box, stepxy=0.5, steprot=50, contactthresh=2.6):
    from scipy.optimize import basinhopping
    # rotate in 10deg increments
    # translate in a 2x2 box in 0.25A increments
    # swap lipid conformer?
    headnames = [l.headname for l in lipids]
    zpos = [l.xyz[2] for l in lipids]
    neighbours = [l.neighbours for l in lipids]

    pos = np.vstack([l.xyz[:2] for l in lipids])
    x0 = pos.flatten().tolist()
    numlips = len(lipids)
    bounds = [(x - 1, x + 1) for x in x0]
    x0 += [180] * numlips  # Add the rotations
    bounds += [(0, 360)] * numlips  # Add the rotations
    stepsizes = np.ones(numlips * 2) * stepxy
    stepsizes = np.hstack((stepsizes, np.ones(numlips) * steprot))  # Add the rotations

    # define the new step taking routine and pass it to basinhopping
    take_step = RandomDisplacementBounds(np.vstack(bounds), stepsizes=stepsizes)
    minimizer_kwargs = dict(method="L-BFGS-B", bounds=bounds,
                            args=(lipids, headnames, zpos, contactthresh, neighbours, numlips, box))
    res = basinhopping(totalContacts, x0, minimizer_kwargs=minimizer_kwargs, disp=True, take_step=take_step, niter=10)
    newpos = res.x[:numlips * 2].reshape((-1, 2))
    newrot = res.x[numlips * 2:]
    return newpos, newrot
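RandomDisplacementBounds and totalContacts are not shown. A per-coordinate bounded displacement step-taker in the spirit of the scipy docs, matching the (bounds, stepsizes) constructor used above (the real class may differ):

import numpy as np

class RandomDisplacementBounds:
    def __init__(self, bounds, stepsizes):
        self.xmin = bounds[:, 0]
        self.xmax = bounds[:, 1]
        self.stepsizes = stepsizes
        self.rng = np.random.default_rng()

    def __call__(self, x):
        # Draw a random step per coordinate, then clip back into the box.
        step = self.rng.uniform(-self.stepsizes, self.stepsizes)
        return np.clip(x + step, self.xmin, self.xmax)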
Example #27
def find_assignment(variable_handler, atoms, max_num_resets, tol, verbose=True):
    init = variable_handler.dict_to_vector()

    def func(vector):
        return sum(evaluate(atom, variable_handler.vector_to_dict(vector)).norm for atom in atoms)

    xs = []
    fs = []
    options = {'ftol': tol**2}
    minimizer_kwargs = {"method": "SLSQP", "options": options}
    for i in range(max_num_resets):
        result = basinhopping(func, init, minimizer_kwargs=minimizer_kwargs)
        if verbose:
            print("iteration %d:" % (i+1))
            print(result)
        xs.append(result.x)
        fs.append(result.fun)
        if result.fun < tol:
            break
        init = np.random.rand(len(init))

    min_idx = min(enumerate(fs), key=lambda pair: pair[1])[0]
    assignment = variable_handler.vector_to_dict(xs[min_idx])
    norm = fs[min_idx]
    return assignment, norm
Example #28
def test_basinhopping_2d_lmfit_vs_scipy():
    """Test basinhopping in lmfit versus scipy."""
    # SciPy
    def func2d(x):
        return np.cos(14.5*x[0] - 0.3) + (x[1]+0.2) * x[1] + (x[0]+0.2) * x[0]

    minimizer_kwargs = {'method': 'L-BFGS-B'}
    x0 = [1.0, 1.0]

    ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs, seed=7)

    # lmfit
    def residual_2d(params):
        x0 = params['x0'].value
        x1 = params['x1'].value
        return np.cos(14.5*x0 - 0.3) + (x1+0.2) * x1 + (x0+0.2) * x0

    pars = lmfit.Parameters()
    pars.add_many(('x0', 1.), ('x1', 1.))

    mini = lmfit.Minimizer(residual_2d, pars)
    kws = {'minimizer_kwargs': {'method': 'L-BFGS-B'}, 'seed': 7}
    out = mini.minimize(method='basinhopping', **kws)

    assert_allclose(out.residual, ret.fun)
    assert_allclose(out.params['x0'].value, ret.x[0], rtol=1e-5)
    assert_allclose(out.params['x1'].value, ret.x[1], rtol=1e-5)
Example #29
def best_position(tag_kde, all_kde, coords):
    X, Y = zip(*coords)
    x_max = max(X)
    x_min = min(X)
    y_max = max(Y)
    y_min = min(Y)

    # Start in the center of the map
    start_x = x_min + (x_max-x_min)/2.
    start_y = y_min + (y_max-y_min)/2.

    # Define the function to minimize, the - is to find the maximum
    def fn(pos, tag_kde=tag_kde, all_kde=all_kde):
        result = -tag_kde(pos) / all_kde(pos)
        return result

    #print "Start:", (start_x, start_y)
    peak_fit = optimize.basinhopping(
        fn,
        (start_x, start_y),
        stepsize=0.15,
        )

    peak_lon = peak_fit['x'][0]
    peak_lat = peak_fit['x'][1]

    return peak_lon, peak_lat, start_x, start_y
Example #30
File: optimize.py Project: evanprs/thesis
    def findOptimumCurve(self):
        """
        Uses downhill simplex (or basin hopping) optimization to find a curve whose frequencies match the target
    
        Returns:
            optpts (tuple): points (x,y) defining optimized curve
        """

        x, y = self.c0
        flatpts = np.append(x, y)
        if self.grade == 'coarse':
            ftol = 1.0
            xtol = 1.0
        else:
            ftol = .1
            xtol = .1
        
        if self.method == 'simplex':
            retvals = fmin(lambda pts: self.evalFitness(pts), flatpts, 
                disp=True, xtol=xtol, ftol=ftol, retall=True, maxiter=300)
       
        elif self.method == 'basinhopping':
            def test(f_new, x_new, f_old, x_old):
                c = (x_new[:len(x_new) // 2], x_new[len(x_new) // 2:])
                return not xy.curve_intersects(xy.interp(c)) # check for intersection
                # TODO - redundant - happens inside basinhopping anyways
            minimizer_kwargs = {'tol':ftol*100}
            res =  basinhopping(lambda pts: self.evalFitness(pts), flatpts, T=1,
                         accept_test=test, stepsize=20, disp=True, callback = print,
                         minimizer_kwargs=minimizer_kwargs)
            retvals = [res.x, list(res.x)]  # this is so indexing to look for xopt doesn't break
        
        else: raise ValueError("Invalid method selected")
    
        #  save the data for later
        #  TODO - live update instead of waiting til end to write - better crash recovery
        labels = ['xopt','allvecs']
        retdict = dict(zip(labels,retvals))  # automatically ignores allvecs if absent
        retdict['fits'] = self.fits
        retdict['fqs'] = self.fqs
    
        outpts = retvals[0]
        x = outpts[:len(outpts) // 2]
        y = outpts[len(outpts) // 2:]
        self.optpts = (x, y)
        print(self.optpts)
    
        retdict['optpts'] = self.optpts # for redundancy 
        retdict['target'] = self.target
        retdict['c0'] = self.c0
        self.allvecs = retdict['allvecs']
        # TODO - this is ridiculous
    
        # isolate best case
        self.best_fit = min(self.fits)
        self.best_fq = self.fqs[self.fits.index(self.best_fit)]
        
        pickle.dump(retdict, open('vals.p','wb')) # TODO - account for overwriting
        pickle.dump(self, open('bell.b','wb'))
        return retdict
Example #31
File: model.py Project: Samreay/Barry
    def get_start(self, num_walkers=1):
        """Gets an optimised `n` starting points by calculating a best fit starting point using basinhopping"""
        self.logger.info("Getting start position")

        def minimise(scale_params):
            return -self.get_posterior(self.unscale(scale_params))

        close_default = 3
        start_random = self.get_raw_start()
        start_close = [(s + p.default * close_default) / (1 + close_default)
                       for s, p in zip(start_random, self.get_active_params())]

        self.logger.info(
            "Starting basin hopping to find a good starting point")
        res = basinhopping(
            minimise,
            self.scale(start_close),
            niter_success=3,
            niter=30,
            stepsize=0.05,
            minimizer_kwargs={
                "method": "Nelder-Mead",
                "options": {
                    "maxiter": 600
                }
            },
        )

        scaled_start = res.x
        ratio = 0.05  # 5% of the unit hypercube

        mins = np.clip(scaled_start - ratio, 0, 1)
        maxes = np.clip(scaled_start + ratio, 0, 1)

        samples = np.random.uniform(mins,
                                    maxes,
                                    size=(num_walkers, len(maxes)))

        unscaled_samples = np.array([self.unscale(s) for s in samples])
        self.logger.debug(f"Start samples have shape {unscaled_samples.shape}")

        return unscaled_samples
Example #32
def best_score_search(true_labels, predictions, f):
    # https://discuss.pytorch.org/t/multilabel-classification-how-to-binarize-scores-how-to-learn-thresholds/25396
    # https://github.com/mratsim/Amazon-Forest-Computer-Vision/blob/46abf834128f41f4e6d8040f474ec51973ea9332/src/p_metrics.py#L15-L53
    def f_neg(threshold):
        ## Scipy minimizes, so we return the negative of the score
        return - f(true_labels, pd.DataFrame(predictions).values > pd.DataFrame(threshold).values.reshape(1, len(predictions[0])))
        # return - f(np.array(true_labels), pd.DataFrame(predictions).values > pd.DataFrame(threshold).values)[2]

    # print(len(predictions[0]))
    # Initialization of best threshold search
    thr_0 = [0.20] * len(predictions[0])
    constraints = [(0.,1.)] * len(predictions[0])
    def bounds(**kwargs):
        x = kwargs["x_new"]
        tmax = bool(np.all(x <= 1))
        tmin = bool(np.all(x >= 0)) 
        return tmax and tmin
    
    # Search using L-BFGS-B, the epsilon step must be big otherwise there is no gradient
    minimizer_kwargs = {"method": "L-BFGS-B",
                        "bounds":constraints,
                        "options":{
                            "eps": 0.05
                            }
                       }
    
    # We combine L-BFGS-B with Basinhopping for stochastic search with random steps
    print("===> Searching optimal threshold for each label")
    start_time = timer()
    
    opt_output = basinhopping(f_neg, thr_0,
                                stepsize = 0.1,
                                minimizer_kwargs=minimizer_kwargs,
                                niter=10,
                                accept_test=bounds)
    
    end_time = timer()
    print("===> Optimal threshold for each label:\n{}".format(opt_output.x))
    print("Threshold found in: %s seconds" % (end_time - start_time))
    
    score = - opt_output.fun
    return score, opt_output.x
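A usage sketch with sklearn's fbeta_score as the metric f (dummy data; the snippet itself additionally needs `import pandas as pd` and `from timeit import default_timer as timer` in scope):

import numpy as np
from sklearn.metrics import fbeta_score

rng = np.random.default_rng(0)
true_labels = rng.integers(0, 2, size=(100, 17))  # multilabel targets
predictions = rng.random((100, 17))               # predicted probabilities

def f(y_true, y_pred):
    return fbeta_score(y_true, y_pred, beta=2, average='samples')

score, thresholds = best_score_search(true_labels, predictions, f)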
Example #33
def bhop_boxfit(data,
                initial_params,
                cutoff_fwhm=3,
                npoints=200,
                niter=100,
                verbose=False):
    """Use scipy basinhopping to fit a box profile convolved
    with a gaussian beam.

    Args:
        data: Tuple of (xdata, ydata) to be fitted
        initial_params: SlabscanFitParams object for this fit.
        cutoff_fwhm (float): Number of fwhm to integrate over in the
            convolution.
        npoints (int): Number of integration points in the convolution.

    Returns:
        A dict with the following keys: Time (t), minima (xmin),
        basinhopping result object (res).
    """
    xdata, ydata = data
    param_names = ('center', 'height', 'width', 'fwhm')
    params_tuple = initial_params.ndarray_of(param_names)
    param_normalizers = initial_params.ndarray_of_norms_for(param_names)

    err_func = get_oer_fit_err_func(xdata, ydata, param_normalizers,
                                    cutoff_fwhm, npoints)
    # bh_callback = get_bh_callback(param_normalizers, verbose)
    t0 = time()

    res = basinhopping(func=err_func,
                       x0=initial_params.normalized_ndarray_of(param_names),
                       T=1.0,
                       stepsize=0.1,
                       niter=niter)

    if verbose:
        print('Seconds', (time() - t0))
        print('Initial params: ')
        initial_params.pp()
        print('Final params  : ', boxfit_ps2str(res.x * param_normalizers))
    return {'t': time() - t0, 'res': res, 'xmin': res.x * param_normalizers}
Example #34
def lml_opt(train_features,
            train_targets,
            test_features,
            kernel_dict,
            regularization,
            global_opt=False,
            algomin='L-BFGS-B',
            eval_jac=True):
    """Test Gaussian process predictions."""
    # Test prediction routine with linear kernel.
    N, N_D = np.shape(train_features)
    regularization_bounds = (1e-3, None)
    kdict, bounds = prepare_kernels(kernel_dict, regularization_bounds,
                                    eval_gradients, N_D)
    print(bounds)
    # Create a list of all hyperparameters.
    theta = kdicts2list(kdict, N_D=N_D)
    theta = np.append(theta, regularization)
    # Define fixed arguments for log_marginal_likelihood
    args = (np.array(train_features), np.array(train_targets), kdict,
            scale_optimizer, eval_gradients, None, eval_jac)
    # Optimize
    if not global_opt:
        popt = minimize(lml.log_marginal_likelihood,
                        theta,
                        args=args,
                        method=algomin,
                        jac=eval_jac,
                        options={'disp': True},
                        bounds=bounds)
    else:
        minimizer_kwargs = {
            'method': algomin,
            'args': args,
            'bounds': bounds,
            'jac': eval_jac
        }
        popt = basinhopping(lml.log_marginal_likelihood,
                            theta,
                            minimizer_kwargs=minimizer_kwargs,
                            disp=True)
    return popt
Example #35
def fftfit(prof, template=None, **fftfit_kwargs):
    """Align a template to a pulse profile.

    Parameters
    ----------
    prof : array
        The pulse profile
    template : array, default None
        The template of the pulse used to perform the TOA calculation. If None,
        a simple sinusoid is used

    Returns
    -------
    mean_amp, std_amp : floats
        Mean and standard deviation of the amplitude
    mean_phase, std_phase : floats
        Mean and standard deviation of the phase

    Other Parameters
    ----------------
    fftfit_kwargs : arguments
        Additional arguments to be passed to error calculation
    """
    prof = prof - np.mean(prof)

    nbin = len(prof)

    ph = np.arange(0, 1, 1/nbin)
    if template is None:
        template = np.cos(2 * np.pi * ph)
    template = template - np.mean(template)

    p0 = [np.max(prof), float(np.argmax(prof) / nbin)]

    res = basinhopping(_fft_fun_wrap, p0,
                       minimizer_kwargs={'args': ([prof, template],),
                                         'bounds': [[0, None], [0, None]]},
                       niter=10000, niter_success=200)

    return fftfit_error(ph, prof, template, res.x, **fftfit_kwargs)
Example #36
def best_f2_score(true_labels, predictions):
    def f_neg(threshold):
        ## Scipy minimizes, so we return the negative of the score
        return -fbeta_score(
            true_labels, predictions > threshold, beta=2, average='samples')

    # Initialization of best threshold search
    thr_0 = [0.20] * 17
    constraints = [(0., 1.)] * 17

    def bounds(**kwargs):
        x = kwargs["x_new"]
        tmax = bool(np.all(x <= 1))
        tmin = bool(np.all(x >= 0))
        return tmax and tmin

    # Search using L-BFGS-B, the epsilon step must be big otherwise there is no gradient
    minimizer_kwargs = {
        "method": "L-BFGS-B",
        "bounds": constraints,
        "options": {
            "eps": 0.05
        }
    }

    # We combine L-BFGS-B with Basinhopping for stochastic search with random steps
    print("===> Searching optimal threshold for each label")
    start_time = timer()

    opt_output = basinhopping(f_neg,
                              thr_0,
                              stepsize=0.1,
                              minimizer_kwargs=minimizer_kwargs,
                              niter=10,
                              accept_test=bounds)

    end_time = timer()
    print("===> Optimal threshold for each label:\n{}".format(opt_output.x))
    print("Threshold found in: %s seconds" % (end_time - start_time))

    score = -opt_output.fun
    return score, opt_output.x
Example #37
def get_xhat(locs, N, k, inverse, pdf_ref, pdf, pdf_metric):
    '''Compute xhat for given coefficient locs using basinhopping.

    Parameters
    ----------
    locs : array_like
        Coefficient location indices.
    N : int
        Length of the desired signal (also number of coefficients in
        total).
    k : int
        Desired sparsity level.
    inverse : callable
        Inverse sparsifying transform.
    pdf_ref : array_like
        Reference pdf of the prior to compare against.
    pdf : callable
        Function that estimates pixel intensity distribution.
    pdf_metric : callable
        Function that returns the distance between pdfs.

    Returns
    -------
    xhat : array_like
        Inverse transform of coeffs.
    locs : array_like
        Indices of non-zero coefficients.
    coeffs : array_like
        Coefficients of xhat.
    '''

    c0 = np.zeros(N)
    ck = np.zeros(k)
    res = basinhopping(obj,
                       ck,
                       minimizer_kwargs={
                           'args': (N, locs, inverse, pdf_ref, pdf, pdf_metric)
                       })
    c0[locs] = res['x']
    xhat = inverse(c0)
    xhat /= np.max(np.abs(xhat)) + np.finfo('float').eps
    return (xhat, locs, res['x'])
Example #38
def get_inverse(x, y, z):
    ''' Function to get inverse kinematics
    -----------------INPUT---------------
    x,y,z = target end coordinates
    ----------------RETURN---------------
    Returns list of r(theta[1]), r(theta[2]), r(theta[3]), theta4'''

    b = (-150, 150)  # Arbitrary bounds for now
    bnds = (b, (-75, 70), (-95, 75), (-90, 45)
            )  # Bounds for r(theta[2]), r(theta[3]), theta4

    # cons = [
    # {'type': 'eq', 'fun': end_constraint}]  # Define constraint type and function to feed into scipy  constraints

    def _get_theta(x, y):
        '''Function to get r(theta[1])
        '''
        try:
            t1 = degrees(arctan(y / x))
            return t1
        except ZeroDivisionError:  # x = 0 raises ZeroDivisionError
            return 0

    # t1 = _get_theta(x, y)
    x0 = array([0.3, 0.3, 0.3, 0.3])
    cons = {'type': 'eq', 'fun': end_constraint}
    minimizer_kwargs = {
        "method": "SLSQP",
        "args": (x, y, z),
        "bounds": bnds,
        "constraints": cons
    }
    # sol = minimize(objective, x0, args=(x, y, z), method='SLSQP', constraints=[cons],bounds=bnds,options={'disp': True})
    # sol = minimize(objective, x0, args=(x, y, z), method='SLSQP',bounds=bnds,options={'disp': True})
    sol = basinhopping(objective,
                       x0,
                       niter=50,
                       minimizer_kwargs=minimizer_kwargs,
                       disp=False)
    angles = sol.x

    return angles
Example #39
def best_eer(val_scores, utt2len, utt2label, key_list):
    def f_neg(threshold):
        ## Scipy tries to minimize the function
        return utt_eer(val_scores, utt2len, utt2label, key_list, threshold)

    # Initialization of best threshold search
    thr_0 = [0.20] * 1  # binary class
    constraints = [(0., 1.)] * 1  # binary class

    def bounds(**kwargs):
        x = kwargs["x_new"]
        tmax = bool(np.all(x <= 1))
        tmin = bool(np.all(x >= 0))
        return tmax and tmin

    # Search using L-BFGS-B, the epsilon step must be big otherwise there is no gradient
    minimizer_kwargs = {
        "method": "L-BFGS-B",
        "bounds": constraints,
        "options": {
            "eps": 0.05
        }
    }

    # We combine L-BFGS-B with Basinhopping for stochastic search with random steps
    logger.info("===> Searching optimal threshold for each label")
    start_time = timer()

    opt_output = basinhopping(f_neg,
                              thr_0,
                              stepsize=0.1,
                              minimizer_kwargs=minimizer_kwargs,
                              niter=10,
                              accept_test=bounds)

    end_time = timer()
    logger.info("===> Optimal threshold for each label:\n{}".format(
        opt_output.x))
    logger.info("Threshold found in: %s seconds" % (end_time - start_time))

    score = opt_output.fun
    return score, opt_output.x
Example #40
 def solve(self,
           init_value_sampler=None,
           n_init=20,
           n_iters=1000,
           verbose=True,
           multiprocessing=0):
     assert self.target_stats is not None, "self.target_stats should have been set before solve."
     bound = np.array([[-0.25, 0.1, 0, -0.1, 20], [0, 1, 0.2, 1, 75]])
     if init_value_sampler is None:
         init_value_sampler = lambda n: np.random.uniform(
             low=bound[0, :], high=bound[1, :], size=[n, 5])
     init = init_value_sampler(n_init)
     result = np.zeros([n_init, 5])
     if multiprocessing:
         params = (self, [init[i] for i in range(n_init)],
                   [i + 1 for i in range(n_init)], True, n_iters, bound)
         pool = mp.Pool(multiprocessing)
         result = pool.map(SSTSolver.solve_multiprocess, params)
         pool.close()
     else:
         for i in range(n_init):
             init_value = init[i, :]
             if verbose:
                 print(
                     f'{i + 1}/{n_init} is starting. Initial value: {init_value}.'
                 )
                 time_stamp = time.time()
             temp_value = basinhopping(self.eval_and_compute_loss,
                                       x0=init_value,
                                       niter=n_iters,
                                       stepsize=0.01).x
             result[i] = minimize(self.eval_and_compute_loss,
                                  x0=temp_value,
                                  method='Nelder-Mead').x
             if verbose:
                 time_elapsed = time.time() - time_stamp
                 formatted_time_elapsed = f'{int(time_elapsed // 3600)}:{int(time_elapsed % 3600 // 60)}:{time_elapsed % 60}'
                 print(
                     f'{i + 1}/{n_init} has completed. Optimized value: {result[i]}. Time elapsed: {formatted_time_elapsed}'
                 )
     self.result = result
     return result
Example #41
def basinhopping_mode(info):
	prebuild(info['c_source_list'], info['cc_options'], info['kernel_file'])
	make_measure_script(int(info['threads']))

	#minimizer_kwargs = {"method": "BFGS"}
	#ret = basinhopping(testfunc, [1.], minimizer_kwargs=minimizer_kwargs, niter=200)
	#print(ret.x)
	#print(ret.fun)

	#minimizer_kwargs = {"args": 1.0}
	#ret = basinhopping(testfunc, [1.], minimizer_kwargs=minimizer_kwargs, stepsize=1)

	#rranges = (slice(-4, 4, 0.25), slice(-4, 4, 0.25))
	#ret = brute(testfunc, rranges, full_output=True, finish=scipy.optimize.fmin)
	#print("global minimum: x = %.4f, f(x0) = %.4f" % (ret.x, ret.fun))
	#print(ret)

	#x0 = [1.3, 0.7, 0.8, 1.9, 1.2]
	#res = minimize(rosen, x0, method='Nelder-Mead', tol=1e-6)
	#print(res)

	#scipy.optimize.basinhopping(func, x0, niter=100, T=1.0, stepsize=0.5, minimizer_kwargs=None, take_step=None, accept_test=None, callback=None, interval=50, disp=False, niter_success=None, seed=None)

	#x0 = [10, 10, 10, 10, 10]
	#res = minimize(rosen, x0, method='Nelder-Mead', tol=1e-6)
	#res = minimize(rosen, x0, args=(), method='Nelder-Mead', jac=None, hess=None, hessp=None, bounds=None, constraints=(), tol=None, callback=None, options=None)
	#print(res)

	#params = (2, 3, 7)
	#rranges = (slice(-4, 4, 0.25), slice(-4, 4, 0.25))
	#ret = optimize.brute(testfunc, rranges, args=params, full_output=True, finish=optimize.fmin)
	#print(ret)

	#x0 = [32, 32, 32]
	#ret = basinhopping(testfunc, x0, niter=30, T=1.0, stepsize=10, minimizer_kwargs=None, take_step=None, accept_test=None, callback=None, interval=50, disp=True, niter_success=None, seed=None)
	#ret = basinhopping(testfunc, x0, niter=300, T=1.0, stepsize=10, minimizer_kwargs=None, take_step=None, accept_test=None, callback=None, interval=50, disp=True, niter_success=None, seed=None)
	#print(ret)

	minimizer_kwargs = {"args": (info, info['remote'], int(info['repetition']), info['huge_num'])}
	#ret = basinhopping(testfunc, [32,32,32], niter=300, T=1.0, stepsize=10, minimizer_kwargs=minimizer_kwargs, take_step=None, accept_test=None, callback=None, interval=50, disp=True, niter_success=None, seed=None)
	ret = basinhopping(evalfunc, [32,32,32], niter=10, T=1.0, stepsize=10, minimizer_kwargs=minimizer_kwargs, take_step=None, accept_test=None, callback=None, interval=50, disp=True, niter_success=None, seed=None)
	print(ret)
Example #42
    def custom_basinhopping(self):
        self.successful_results = np.empty(self.n)
        self.successful_results[:] = np.nan
        self.current_success_number = 0

        minimizer_config = {
            "method": 'BFGS',
            "options": {
                'gtol': self.confidence
            }
        }

        output = basinhopping(self.to_minimise,
                              self.initial,
                              niter=1000,
                              minimizer_kwargs=minimizer_config,
                              take_step=self.step_function,
                              callback=self.callback_function)

        return output.x
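self.step_function and self.callback_function are defined elsewhere. basinhopping stops hopping as soon as the callback returns True, so a success-counting callback consistent with the bookkeeping initialised above might look like this (a sketch, not this project's code):

class SuccessCounter:  # hypothetical host class for the attributes used above
    def callback_function(self, x, f, accepted):
        if accepted:
            self.successful_results[self.current_success_number] = f
            self.current_success_number += 1
        # Returning True tells basinhopping to stop early.
        return self.current_success_number >= self.n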
Example #43
 def run(self, wrapper='basinhopping'):
     # maximize likelihood function by scipy.optimize.minimize function
     minimizer_kwargs = {"bounds": self.paramsBounds}
     if wrapper == 'minimize':
         self.result = minimize(self.chi2, self.paramsInit,
                                **minimizer_kwargs)
     if wrapper == 'basinhopping':
         self.result = basinhopping(self.chi2,
                                    self.paramsInit,
                                    minimizer_kwargs=minimizer_kwargs)
     self.paramsMax = self.result.x
     self.diagnostics = {
         'paramsInit': self.paramsInit,
         'paramsMax': self.paramsMax,
         'paramsBounds': self.paramsBounds,
         'paramsNames': self.paramsNames,
         'result': self.result,
         'Ndim': self.Ndim
     }
     return self.diagnostics
Example #44
    def train(self):
        
        f=self.create_lossOdeint()
        bnds = ((1e-12, .2),(1e-12, .2),(5,len(self.data)-5),(1e-12, .2),
        (1/120, .4),(1e-12, .4),(1e-12, .4),(1e-12, .4),(1e-12, .4),(1e-12, .4))# your bounds
        x0 = [1e-3, 1e-3, 0, 1e-3, 1/120, 1e-3, 1e-3, 1e-3, 1e-3, 1e-3]
        minimizer_kwargs = { "method": "L-BFGS-B","bounds":bnds }
        optimal = basinhopping(f, x0, minimizer_kwargs=minimizer_kwargs,niter=10,disp=True)  
        
        point = self.s_0, self.start_date, self.i_0, self.d_0, self.r_0, self.startNCases, self.weigthCases, \
                    self.weigthRecov, self.weigthDeath
        
        strSave='{}, {}, '.format(self.country, abs(optimal.fun))
        strSave=strSave+', '.join(map(str,point))
        self.append_new_line('./results/history_'+self.country+str(self.version)+'.csv', strSave) 
        
        del self, f, strSave, point
        gc.collect()

        return optimal.fun
Example #45
def test_Wang_LAP():
	"""Test the least action path method from Jin Wang and colleagues (http://www.pnas.org/cgi/doi/10.1073/pnas.1017017108)

	Returns
	-------

	"""
	x1_end=1
	x2_end=0
	x2_init=1.5
	x1_init=1.5
	N = 20

	x1_input=np.arange(x1_init, x1_end + (x1_end-x1_init)/N, (x1_end-x1_init)/N)
	x2_input=np.arange(x2_init, x2_end + (x2_end-x2_init)/N, (x2_end-x2_init)/N)
	X_input=np.vstack((x1_input, x2_input))

	dyn.tl.least_action(X_input, F=F, D=0.1, N=20, lamada_=1)
	res = optimize.basinhopping(dyn.tl.least_action, x0=X_input,minimizer_kwargs={'args': (2, F, 0.1, 20, 1)})
	res
Example #46
    def start(self, seq_to_optimize):
        """
        @param: The starting list of values to be optimised.
        Note that the value bounds per element are fixed.
        """
        x0 = seq_to_optimize
        assert isinstance(x0, list) and len(x0) > 0 and all([isinstance(x, Number) for x in x0]),  \
            "Input sequence should be a list of type numbers.Number."

        # the bounds
        xmin = [1.0] * len(x0)
        xmax = [2.0] * len(x0)

        # rewrite the bounds in the way required by L-BFGS-B
        bounds = [(low, high) for low, high in zip(xmin, xmax)]

        # use method L-BFGS-B because the problem is smooth and bounded
        minimizer_kwargs = dict(method="L-BFGS-B", bounds=bounds)
        res = basinhopping(self.f, x0, minimizer_kwargs=minimizer_kwargs)
        return res
Example #47
    def trainMinimizer(self, input, optimizer):
        flattenedWeights = self.__flattenNetwork()
        print(flattenedWeights.shape)
        # Minimize the cost function
        res = optimize.basinhopping(func=self.__loss,
                                    x0=flattenedWeights,
                                    disp=True,
                                    minimizer_kwargs={
                                        "args": (input, input),
                                        "method": "L-BFGS-B",
                                        "options": {
                                            "disp": True
                                        }
                                    })
        # res = adam(fun=self.__loss, x0=flattenedWeights, args=(input, input))
        # Rebuild the weights matrix from the optimized vector; the original
        # passed the unoptimized flattenedWeights here, discarding the result
        self.__rebuildNetwork(res.x)
        # Error of the cost function
        error = res.fun
        print(f'Final loss is {error}')
Example #48
0
    def test_all_nograd_minimizers(self):
        # test 2d minimizations without gradient.  Newton-CG requires jac=True,
        # so not included here.
        i = 1
        methods = [
            'CG', 'BFGS', 'L-BFGS-B', 'TNC', 'SLSQP', 'Nelder-Mead', 'Powell',
            'COBYLA'
        ]
        minimizer_kwargs = copy.copy(self.kwargs_nograd)
        for method in methods:
            minimizer_kwargs["method"] = method
            res = basinhopping(func2d_nograd,
                               self.x0[i],
                               minimizer_kwargs=minimizer_kwargs,
                               niter=self.niter,
                               disp=self.disp)
            tol = self.tol
            if method == 'COBYLA':
                tol = 2
            assert_almost_equal(res.x, self.sol[i], decimal=tol)
Example #49
0
def main():

    results = {}
    results['shgo'] = optimize.shgo(eggholder, bounds)

    results['DA'] = optimize.dual_annealing(eggholder, bounds)

    results['DE'] = optimize.differential_evolution(eggholder, bounds)

    # basinhopping requires an initial guess x0
    x0 = np.array([randint(-512, 513), randint(-512, 513)])
    kwargs = {'bounds': bounds}
    results['BH'] = optimize.basinhopping(eggholder, x0, minimizer_kwargs=kwargs)

    # shgo has a second method, which returns all local minima rather
    # than only what it thinks is the global minimum:
    results['shgo_sobol'] = optimize.shgo(eggholder, bounds, n=200, iters=5,
                                          sampling_method='sobol')

    plot_minima(results)
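This snippet assumes `eggholder`, `bounds`, `randint`, and `plot_minima` are defined elsewhere. A plausible reconstruction of the first three, following the standard eggholder test function from the SciPy global-optimization tutorial (`plot_minima` is omitted, since its behavior cannot be recovered from the snippet):

import numpy as np
from numpy.random import randint
from scipy import optimize

def eggholder(x):
    # standard 2-D eggholder test function; global minimum f(512, 404.2319) = -959.6407
    return (-(x[1] + 47) * np.sin(np.sqrt(abs(x[0] / 2 + (x[1] + 47))))
            - x[0] * np.sin(np.sqrt(abs(x[0] - (x[1] + 47)))))

bounds = [(-512, 512), (-512, 512)]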
Example #50
0
    def _fit_basinhopping(self, X, y, X_val, Y_val, activations, deltas,
                          coef_grads, intercept_grads, layer_units):
        # Store meta information for the parameters
        self._coef_indptr = []
        self._intercept_indptr = []
        start = 0

        # Save sizes and indices of coefficients for faster unpacking
        for i in range(self.n_layers_ - 1):
            n_fan_in, n_fan_out = layer_units[i], layer_units[i + 1]

            end = start + (n_fan_in * n_fan_out)
            self._coef_indptr.append((start, end, (n_fan_in, n_fan_out)))
            start = end

        # Save sizes and indices of intercepts for faster unpacking
        for i in range(self.n_layers_ - 1):
            end = start + layer_units[i + 1]
            self._intercept_indptr.append((start, end))
            start = end

        # Run Basinhopping
        packed_coef_inter = _pack(self.coefs_, self.intercepts_)

        minimizer_kwargs = {
            'method': 'L-BFGS-B',
            'args': (X, y, activations, deltas, coef_grads, intercept_grads)
        }

        result = optimize.basinhopping(x0=packed_coef_inter,
                                       T=self.T,
                                       stepsize=self.stepsize,
                                       func=self._loss_func,
                                       niter=self.max_iter,
                                       callback=self._callback,
                                       minimizer_kwargs=minimizer_kwargs)

        optimal_parameters = result.x
        self.loss = result.fun

        self._unpack(optimal_parameters)
Example #51
0
    def optimize(self, x0=None, nhops=None, polish=1e-6):
        """
        Perform the optimization.

        Parameters
        ----------
        x0 : ndarray, None
            Initial optimization vector. If None, use a random vector.
        nhops : int, None
            The number of basin hops to perform while optimizing. If None,
            hop a number of times equal to the dimension of the conditioning
            variable(s).
        polish : float
            If `polish` > 0, the minimum found is refined by removing small
            components and re-optimizing with tighter tolerances.
        """
        from scipy.optimize import basinhopping

        if x0 is not None:
            x = x0
        else:
            x = self.construct_random_initial()

        if nhops is None:
            nhops = self._default_hops

        minimizer_kwargs = {'method': 'L-BFGS-B',
                            'bounds': [(0, 1)] * x.size,
                            }

        res = basinhopping(func=self.objective,
                           x0=x,
                           minimizer_kwargs=minimizer_kwargs,
                           niter=nhops,
                           accept_test=accept_test,
                           )

        self._optima = res.x

        if polish:
            self._polish(cutoff=polish)
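The `accept_test` referenced above is not defined in the snippet. In SciPy's API this is any callable that receives keyword arguments (notably `x_new`) and returns True, False, or the string "force accept". A minimal sketch consistent with the (0, 1) box used in `minimizer_kwargs` — an assumption about what the project's hook actually does:

import numpy as np

def accept_test(**kwargs):
    # accept the hop only if the proposed point stays inside the unit box
    x = kwargs["x_new"]
    return bool(np.all(x >= 0.0) and np.all(x <= 1.0))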
Example #52
0
def optimize_bis(params, args_, distance, I_A, I_B, I_C, mybounds, obs, cumul):
    """
        Optimise sur la fonction objectif avec l'algorithme du recuit simule
        de gradient et renvoie le resultat de cette opt. 
    
        Args
        ----
        params : list
            Contient les parametres fixes du modele de la fonction 
            mdl.integrateCecidoInflo
        args_ : list
            Contient une initialisation des parametres a estimer du modele de 
            la fonction mdl.integrateCecidoInflo           
        distance : fonction 
            Fonction objectif choisie
        I_ABC : ndarray
            Nombre d'inflorescences vivantes au cours du temps dans chaque 
            sous parcelle
        mybounds :
            Bornes des parametres a estimer
        obs : bool
            Vaut 0 si on fait l'optimisation sur toutes les valeurs
            journalieres, 1 si sur les valeurs observees
        cumul : bool
            Vaut 1 si on fait l'optimisation sur les valeurs cumulees au cours
            du temps. 0 sinon. 
            
        Returns 
        -------
        res : float
            Resultat de l'optimisation
    """

    res = basinhopping(objectif,
                       params,
                       minimizer_kwargs={
                           "args": (args_, distance, I_A, I_B, I_C, obs, cumul)
                       },
                       accept_test=mybounds)

    return res
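`mybounds` is passed as `accept_test`, so it is presumably a callable bounds object in the style of the SciPy basinhopping documentation. A sketch of such a class; the name and box limits are illustrative, not from the original project:

import numpy as np

class MyBounds:
    def __init__(self, xmin, xmax):
        self.xmin = np.asarray(xmin)
        self.xmax = np.asarray(xmax)

    def __call__(self, **kwargs):
        # accept the hop only if every component stays inside the box
        x = kwargs["x_new"]
        return bool(np.all(x >= self.xmin) and np.all(x <= self.xmax))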
Example #53
0
def main():
    args = create_cmd_args()
    if not args:
        args = get_input_args()
    # Select subjects for fitting
    if args.subject == 'all':
        # Fit model to all subjects
        subjects = range(100)
    elif args.subject[0] == '-':
        # For leave-one-subject-out cross validation
        subjects = [int(x) for x in range(100) if x != -int(args.subject)]
    else:
        # For single subject fitting
        subjects = [int(args.subject)]
    notes = 'na'
    bounds = model_bounds[args.training_model]
    simulator, trials = build_simulator(args.experiment_name, subjects)
    logfile = 'fitting_log_' + ymdhms() + '_' + str(args.subject) + '.txt'
    with open(logfile, 'a') as file:
        file.write(f'experiment_name: {args.experiment_name}\nsubject: {args.subject}\ntrials: {trials}\n'
            f'approach_experiment: {args.approach_experiment}\n'
            f'approach_model: {args.approach_model}, avoid_model: {args.avoid_model}\n'
            f'training_model: {args.training_model}\n'            
            f'method: {args.method}\nbounds: {bounds}\nt_start: {args.t_start}, t_end: {args.t_end}\n'
            f'preferred speed: {args.ps}\nnotes: {notes}\n')              
    if args.method == 'nelder-mead':
        res = optimize.minimize(error, x0, args=(simulator, trials, logfile, args), method='nelder-mead',
                        options={'xatol': 1e-6, 'disp': True, 'adaptive': True})
    elif args.method == 'shgo':
        res = optimize.shgo(error, bounds, args=(simulator, trials, logfile, args))
    elif args.method == 'dual_annealing':
        res = optimize.dual_annealing(error, bounds, args=(simulator, trials, logfile, args),
                                      initial_temp=25000)
    elif args.method == 'differential_evolution':
        res = optimize.differential_evolution(error, bounds, args=(simulator, trials, logfile, args),
                                    updating='immediate', workers=1)
    elif args.method == 'basinhopping':
        # basinhopping expects an initial point, not a bounds list; the original
        # passed `bounds` as x0, so use x0 and hand the bounds to the local minimizer
        res = optimize.basinhopping(error, x0,
                                    minimizer_kwargs={'args': (simulator, trials, logfile, args),
                                                      'bounds': bounds})
    with open(logfile, 'a') as file:
        file.write(f'The optimal x: {res.x}')
    print(res.x)
Example #54
0
    def findminglob(self,
                    pvals,
                    f_name='None',
                    email=0,
                    meth='TNC',
                    bnds='strict',
                    it=300,
                    stpsize=0.5,
                    temp=1.,
                    displ=True,
                    maxits=3000):
        """Function which minimizes 4DVAR cost fn. Takes an initial state
        (pvals), an obs dictionary, an obs error dictionary, a dataClass and
        a start and finish time step.
        """
        p = np.array(pvals.tolist()[0], dtype=float)  # np.float was removed in NumPy 1.24
        self.xb = p
        self.names = pvals.dtype.names
        if bnds == 'strict':
            bnds = self.opt_bnds(self.dC.bnds)
        # otherwise keep the caller-supplied bounds as-is

        findmin = spop.basinhopping(self.cost,
                                    p,
                                    niter=it,
                                    minimizer_kwargs={
                                        'method': meth,
                                        'bounds': bnds,
                                        'jac': self.gradcost2,
                                        'options': {
                                            'maxiter': maxits
                                        }
                                    },
                                    stepsize=stpsize,
                                    T=temp,
                                    disp=displ)
        if f_name != 'None':
            if email == 1:
                my_email.send_email(findmin, f_name)
        return findmin
Example #55
0
def removecoin(inputimage, startguess=[0.5, 0.5, 0.1], method='local'):
    if method == 'local':
        # args must be a tuple (assuming circlecost takes the image as its
        # second argument); the original also ignored startguess in favor of
        # a hard-coded initial point
        x_opt = fmin(circlecost, startguess, args=(inputimage,))
        print(x_opt)
        outermean = circlecost_outermean(x_opt, totuple(inputimage))
        outputimage = drawcircle(inputimage, x_opt[0], x_opt[1], x_opt[2], 0)

    elif method == 'global':
        minimizer_kwargs = {
            'method': 'Nelder-Mead',
            'args': totuple(inputimage)
        }
        result = basinhopping(circlecost, startguess,
                              minimizer_kwargs=minimizer_kwargs)
        x_opt = result.x
        print(x_opt)
        outermean = circlecost_outermean(x_opt, totuple(inputimage))
        outputimage = drawcircle(inputimage, x_opt[0], x_opt[1], x_opt[2], 0)

    imshow(outputimage)
    return outputimage
Example #56
0
def MSM_original(data,kbar,startingvals):
    """
    Implementation of MSM close to the original one in Matlab

    data = data to use for likelihood calculation
    kbar = number of multipliers in the model
    startingvals = the 4 parameters of the MSM; recommended to be set to None so
    that the function computes optimal starting values itself
    """
    A_template = T_mat_template(kbar)
    startingvals, LLs, ordered_parameters = starting_values(data, startingvals, kbar, A_template)
    bnds = ((1.001, 50), (1, 1.99), (1e-3, 0.999999), (1e-4, 5))
    minimizer_kwargs = dict(method="L-BFGS-B", bounds=bnds, args=(kbar, data, A_template, None))
    res = opt.basinhopping(likelihood, x0=startingvals, minimizer_kwargs=minimizer_kwargs,
                           niter=1)
    parameters, LL, niters, output = res.x, res.fun, res.nit, res.message
    # print(LL, parameters)
    LL, LLs, pi_t = likelihood(parameters, kbar, data, A_template, None, 2)
    LL = -LL

    return (LL, LLs, parameters, pi_t)
Example #57
0
    def fit(self, X, Y, sY):
        # first we need to build a distance matrix
        self.X = X
        self.Y = Y
        # assign self.Y before broadcasting a scalar noise level against it;
        # the original used self.Y here before it was set
        self.sY = sY
        if isinstance(self.sY, float):
            self.sY = self.Y * 0 + sY
        self.dX1X1 = dmat(X.reshape(-1, 1))
        fitter = basinhopping(func=self.marginal_likelihood,
                              x0=np.array([self.mu, self.sigma, self.length]),
                              niter=13)
        self.mu = fitter.x[0]
        self.sigma = abs(fitter.x[1])
        self.length = abs(fitter.x[2])

        self.tY = (Y - self.mu).reshape(-1, 1)
        self.KX1X1 = rbf_kern(
            self.dX1X1, self.sigma**2.0,
            self.length) + np.eye(self.dX1X1.shape[0]) * self.sY
        self.KX1X1_inv = np.linalg.pinv(self.KX1X1)
        self.KiY = self.KX1X1_inv.dot(self.tY)
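`dmat` and `rbf_kern` are not shown in this snippet. A plausible reconstruction, consistent only with how they are called above (a pairwise distance matrix, then a squared-exponential kernel with variance `sigma**2` and lengthscale `length`); this is an assumption about the project's helpers, not their actual code:

import numpy as np
from scipy.spatial.distance import cdist

def dmat(X, Z=None):
    # Euclidean distance matrix between row vectors
    Z = X if Z is None else Z
    return cdist(X, Z)

def rbf_kern(d, sigma2, length):
    # squared-exponential (RBF) kernel evaluated on a distance matrix
    return sigma2 * np.exp(-0.5 * (d / length) ** 2)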
Example #58
0
File: sampling.py  Project: gnwong/pydemic
    def basinhopping(self, x0=None, bounds=None, **kwargs):
        if x0 is None:
            x0 = [par.guess for par in self.fit_parameters]
        if bounds is None:
            bounds = [par.bounds for par in self.fit_parameters]

        xmin = np.array(bounds)[:, 0]
        xmax = np.array(bounds)[:, 1]

        def bounds_enforcer(**kwargs):
            x = kwargs["x_new"]
            tmax = bool(np.all(x <= xmax))
            tmin = bool(np.all(x >= xmin))
            return tmax and tmin

        from scipy.optimize import basinhopping
        sol = basinhopping(
            self.minimizer, x0, accept_test=bounds_enforcer, **kwargs
        )
        sol.x = dict(zip([par.name for par in self.fit_parameters], sol.x))
        return sol
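Note that an `accept_test` like `bounds_enforcer` only filters the hops themselves; the local minimizer run inside each hop is unconstrained unless bounds are also passed via `minimizer_kwargs`, so intermediate local solutions can still leave the box.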
Example #59
0
File: __init__.py  Project: pmaes/pySTAD
def run_basinhopping(cf, mst, links_to_add, highD_dist_matrix, debug = False):
    '''
    Returns a new graph.
        cf = cost function
        start = starting x (initialized to the number of edges in the MST)
    '''
    disp = False
    if debug: disp = True
    start = len(mst.es())
    if debug: print(f"Starting optimization at {start}")
    minimizer_kwargs = {'args': {'graph': mst,
                                 'list_of_links_to_add': links_to_add,
                                 'highD_dist_matrix': highD_dist_matrix,
                                 'debug': debug}}
    result = optimize.basinhopping(
        cf,
        start,
        disp=disp,
        minimizer_kwargs=minimizer_kwargs
    )
    if debug:
        print(result)
    g = add_links_to_graph(mst, highD_dist_matrix, links_to_add, result.x[0])
    return g
Example #60
0
File: _gp.py  Project: tsilifis/quinoa
	def argmaxvar(self, bounds = (-4., 4.)):
		assert isinstance(bounds, tuple)

		def neg_var_x(x):
			assert x.shape[0] == self._kern._input_dim
			X_test = x.reshape(1, x.shape[0])
			K_test = self._kern.cov(self._X, X_test)
			K_test_grad = self._kern.d_cov_d_X(X_test, self._X)[:,0,:].T
			v = np.linalg.solve(self._chol, K_test)
			v_grad = np.linalg.solve(self._chol, K_test_grad)

			return - self.predict(X_test)[1][0,0], 2. * np.dot(v_grad.T, v)

		bnds = (bounds,) * self._kern._input_dim
		#res = minimize(neg_var_x, np.random.normal(size = (self._kern._input_dim,)), method = 'L-BFGS-B', jac = True, bounds = bnds, options = {'ftol': 1e-16, 'gtol': 1e-16, 'maxiter' : 1000})
		#res = minimize(neg_var_x, x0, method = 'TNC', jac = True)
		
		# --- Using optimize.basinhopping
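	# neg_var_x returns a (value, gradient) pair, so jac=True below tells the
	# local L-BFGS-B minimizer to unpack the second element as the gradient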
	minimizer_kwargs = {'method': 'L-BFGS-B',
	                    'jac': True,
	                    'bounds': bnds,
	                    'options': {'ftol': 1e-16, 'gtol': 1e-16, 'maxiter': 1000}}
	res = basinhopping(neg_var_x,
	                   np.random.uniform(size=(self._kern._input_dim,)) - 0.5,
	                   minimizer_kwargs=minimizer_kwargs)
	return res.x