Example No. 1
import random

import numpy as np
import matplotlib.pyplot as plt
from modestga import minimize


def fun(x, *args):
    # Noisy sphere cost function (matches the plot title below)
    return np.sum(x ** 2) + random.random()


def callback(x, fx, ng, *args):
    # Called after each generation with the best solution found so far
    print('    fx = {}'.format(fx))

    # Save to solution history
    x_hist = args[0]
    fx_hist = args[1]
    x_hist.append(x)
    fx_hist.append(fx)


bounds = [(0, 10) for i in range(10)]

x_hist = list()
fx_hist = list()
args = (x_hist, fx_hist)

res = minimize(fun, bounds, args=args, callback=callback, workers=1)

# Print optimization result
print(res)

if __name__ == "__main__":
    # Plot solution history
    fig, ax = plt.subplots(2, 1, sharex=True)
    ax[0].plot(x_hist)
    ax[0].set_title('x')
    ax[1].plot(fx_hist)
    ax[1].set_title('f(x) = np.sum(x ** 2) + random.random()')
    ax[1].set_xlabel('Generation')

    plt.show()
Example No. 2
import numpy as np

import modestga


def con_minimize(
    fun, bounds, constr=(), x0=None, args=(), callback=None, options={}, workers=None
):
    """Constrained minimization of `fun` using Genetic Algorithm.

    This function is a wrapper over modestga.minimize().
    The constraints are defined as a tuple of functions
    (`fcon1(x, *args)`, `fcon2(x, *args)`, `...`).
    The algorithm searches for a solution minimizing
    `fun(x, *args)` and satisfying the conditions
    (`fcon1(x, *args) >= 0`, `fcon2(x, *args) >= 0`, `...`).

    `callback` arguments: `x`, `fx`, `ng`, `*args`.
    `fx` is the function value at the generation `ng`.

    Returns an optimization result object with the following attributes:
    - x - numpy 1D array, optimized parameters,
    - message - str, exit message,
    - ng - int, number of generations,
    - fx - float, final function value.

    :param fun: function to be minimized
    :param bounds: tuple, parameter bounds
    :param constr: tuple, functions defining constraints
    :param x0: numpy 1D array, initial parameters
    :param args: tuple, positional arguments to be passed to `fun` and to `fcon`
    :param callback: function, called after every generation
    :param options: dict, GA options
    :param workers: int, number of processes to use (will use all CPUs if None)
    :return: OptRes, optimization result
    """
    # Wrap cost function with constraints
    def fun_soft_con(x, *augmented_args):
        # Unpack constraints and arguments
        fcore = augmented_args[0]  # Function to be minimized
        fcons = augmented_args[1]  # Constraints
        user_args = augmented_args[2:]  # Arguments

        # Evaluate core function
        ycore = fcore(x, *user_args)

        # Initialize penalty
        penalty = 0.0

        # Update penalty
        # (the more negative fcon() is, the higher penalty)
        for f in fcons:
            ycon = np.max([f(x, *user_args) * -1.0, 0.0])
            pscale = ycore / (ycon + 1e-6)
            penalty += ycon * pscale

        return ycore + penalty

    # Run minimization
    augmented_args = (fun, constr, *args)

    res = modestga.minimize(
        fun=fun_soft_con,
        bounds=bounds,
        x0=x0,
        args=augmented_args,
        callback=callback,
        options=options,
        workers=workers,
    )

    # Extend result with constraint violation info
    res.constr = [fcon(res.x, *args) for fcon in constr]

    return res
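
Below is a minimal usage sketch for con_minimize; the objective and constraint are illustrative, not part of the library. The constraint fcon(x) = x[0] - 1 encodes x[0] >= 1 in the required fcon(x) >= 0 form.

# Usage sketch (illustrative objective and constraint, not library code)
import numpy as np

def sphere(x, *args):
    return np.sum(x ** 2)

def fcon(x, *args):
    return x[0] - 1.0  # feasible when x[0] >= 1

res = con_minimize(sphere, bounds=[(-5, 5)] * 3, constr=(fcon,))
print(res.x)       # optimized parameters (should approach [1, 0, 0])
print(res.constr)  # constraint values at the optimum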
Example No. 3
    def estimate(self):
        # Objective function
        self.logger.debug("Instantiating ObjectiveFun")
        objective_fun = ObjectiveFun(self.fmu_path, self.inp, self.known,
                                     self.est, self.ideal, self.ftype)
        self.logger.debug(f"ObjectiveFun: {objective_fun}")

        # Initial guess
        x0 = [MODESTGA.scale(x.value, x.lo, x.hi) for x in self.est]
        self.logger.debug("modestga x0 = {}".format(x0))

        # Save initial guess in summary
        row = pd.DataFrame(index=[0])
        for x, c in zip(x0, MODESTGA.TMP_SUMMARY.columns):
            row[c] = x
        row[MODESTGA.ERR] = np.nan
        row[MODESTGA.METHOD] = MODESTGA.NAME
        # DataFrame.append() was removed in pandas 2.0; use pd.concat() instead
        MODESTGA.TMP_SUMMARY = pd.concat([MODESTGA.TMP_SUMMARY, row],
                                         ignore_index=True)

        # Parameter bounds
        b = [(0.0, 1.0) for x in self.est]
        self.logger.debug(f"bounds = {b}")

        out = minimize(
            objective_fun,
            bounds=b,
            x0=x0,
            args=(),
            callback=MODESTGA._callback,
            options=self.options,
            workers=self.workers,
        )

        self.logger.debug(f"out = {out}")
        outx = [
            MODESTGA.rescale(x, ep.lo, ep.hi)
            for x, ep in zip(out.x.tolist(), self.est)
        ]

        self.logger.debug("modestga x = {}".format(outx))

        # Update summary
        self.summary = MODESTGA.TMP_SUMMARY.copy()
        self.summary.index += 1  # Adjust iteration counter
        self.summary.index.name = MODESTGA.ITER  # Rename index

        # Update error
        self.summary[MODESTGA.ERR] = list(
            map(objective_fun,
                self.summary[[x.name for x in self.est]].values))

        for ep in self.est:
            name = ep.name
            # list(map(...)) - for Python 2/3 compatibility
            self.summary[name] = list(
                map(lambda x: MODESTGA.rescale(x, ep.lo, ep.hi),
                    self.summary[name]))  # Rescale

        # Reset temp placeholder
        MODESTGA.TMP_SUMMARY = pd.DataFrame(columns=self.summary_cols)

        # Return DataFrame with estimates
        par_vec = outx
        par_df = pd.DataFrame(columns=[x.name for x in self.est], index=[0])
        for col, x in zip(par_df.columns, par_vec):
            par_df[col] = x

        return par_df
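
For context, MODESTGA.scale and MODESTGA.rescale are assumed here to be the usual linear maps between a parameter's (lo, hi) range and the unit interval; a minimal sketch, not the library's actual implementation:

def scale(v, lo, hi):
    # Map v from [lo, hi] to [0, 1]
    return (v - lo) / (hi - lo)

def rescale(v01, lo, hi):
    # Map v01 from [0, 1] back to [lo, hi]
    return v01 * (hi - lo) + lo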
Example No. 4
    def task(self, **kwargs):
        """
        Args:
            kwargs (dict, optional):
                keyword arguments:

                bounds (2D array_like of float):
                    array of min/max pairs for optimization constraints etc

                method (str):
                    optimization method

                y (1D array_like of float):
                    target of inverse problem (only if 'self' is of type
                    Inverse), shape: (nOut)

        Returns:
            (2-tuple of 1D array of float):
                model input at optimum and corresponding model output,
                shape: (nInp) and shape: (nOut)
        Note:
            - requires initial point(s) self.x for getting input number: nInp
            - if inverse problem solution then self.y is required as target
        """
        Base.task(self, **kwargs)

        bounds = kwargs.get('bounds', None)

        method = self.kwargsGet(kwargs, ('method', 'methods'), None)
        if method is not None:
            self.method = method
        self.write('+++ method: ', self.method)

        # sets target for Inverse
        if type(self).__name__ in ('Inverse',):  # tuple, not a substring test
            y = kwargs.get('y', None)
            self.y = np.atleast_1d(y) if y is not None else self.y

        assert self.model is not None

        xIni = self.x.copy()
        self._history = []
        for x0 in xIni:
            self._trialHistory = []

            if self.method.startswith('bas'):
                res = scipy.optimize.basinhopping(func=self.objective,
                                                  x0=x0,
                                                  niter=100,
                                                  T=1.0,
                                                  stepsize=0.5,
                                                  minimizer_kwargs=None,
                                                  take_step=None,
                                                  accept_test=None,
                                                  callback=None,
                                                  interval=50,
                                                  disp=False,
                                                  niter_success=None)
                x = np.atleast_1d(res.x)
                y = np.atleast_1d(res.fun)
                success = 'success' in res.message[0]

            elif self.method.startswith('dif'):
                if bounds is None:
                    bounds = [(-10, 10)] * len(x0)
                res = scipy.optimize.differential_evolution(
                    func=self.objective,
                    bounds=bounds,
                    strategy='best1bin',
                    maxiter=1000,  # scipy's default; None fails in newer scipy
                    popsize=15,
                    tol=0.01,
                    mutation=(0.5, 1),
                    recombination=0.7,
                    seed=None,
                    disp=False,
                    polish=True,
                    init='latinhypercube')
                x, y = res.x, res.fun
                success = res.success

            elif self.method == 'ga':
                validKeys = ['tol', 'options', 'bounds']
                kw = {k: kwargs[k] for k in validKeys if k in kwargs}
                res = mg.minimize(
                    fun=self.objective,
                    x0=x0,
                    # method=self.method, # TODO .
                    **kw)
                x, y = np.atleast_1d(res.x), np.atleast_1d(res.fx)
                success = True  # TODO .
            else:
                res = scipy.optimize.minimize(
                    fun=self.objective,
                    x0=x0,
                    method=self.method,
                )
                x = np.atleast_1d(res.x)
                kw = self.kwargsDel(kwargs, 'x')
                y = self.model.predict(x=res.x, **kw)[0]
                success = res.success

            self._history.append(self._trialHistory)

        if not success:
            self.write('+++ error message: ', res.message)
            x = [None] * x.size
            y = [None] * y.size

        nTrial = len(self._history)  # number of trials (one history per start point)
        if nTrial > 1:
            self.write('+++ Optima of all trials:')
            for iTrial, history in enumerate(self._history):
                self.write('    [' + str(iTrial) + '] x: ', history[-1][0],
                           '\n        y: ', history[-1][1],
                           '\n        objective: ', history[-1][2])

            # self._history[iTrial][iLast=-1][jObj=2] -> list of best obj.
            finalObjectives = [hist[-1][2] for hist in self._history]
            if self.__class__.__name__ == 'Inverse':
                # For Inverse, the best trial has the smallest objective magnitude
                iTrialBest = int(np.argmin(np.abs(finalObjectives)))
            else:
                iTrialBest = int(np.argmin(finalObjectives))

        else:
            iTrialBest = 0

        # y: self._history[iTrialBest][iLast=-1][jY=1]
        historyBest = self._history[iTrialBest]
        finalBest = historyBest[-1]
        self.x, self.y = finalBest[0], finalBest[1]
        objectiveBest = finalBest[2]
        self.write('+++ Best trial:\n    [' + str(iTrialBest) + '] x: ',
                   self.x, '\n        y: ', self.y, '\n        objective: ',
                   objectiveBest)
        return self.x, self.y
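
The 'ga' branch above forwards only tol, options and bounds to mg.minimize. A self-contained sketch of that call path with an illustrative objective (assumes modestga is installed):

import numpy as np
import modestga as mg

def objective(x, *args):
    # Illustrative cost function, not part of the class above
    return float(np.sum(x ** 2))

res = mg.minimize(fun=objective, bounds=[(-5.0, 5.0)] * 2,
                  options={'generations': 50})
x, y = np.atleast_1d(res.x), np.atleast_1d(res.fx)
print(x, y)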
Example No. 5
    def minimizeLeastSquares(self, method, c0, **kwargs):
        """
        Minimizes least squares: sum((self.f(self.X) - self.Y)^2) / X.size
            for SINGLE initial fit param set
        Updates self.ready and self.weights according to success of optimizer

        Args:
            method (str):
                optimizing method for minimizing objective function
                [recommendation: 'BFGS' or 'L-BFGS-B' if ill-conditioned,
                                 'Nelder-Mead' or 'Powell' if data is noisy]

            c0 (1D array_like of float):
                initial guess of fit parameter set

            kwargs (dict, optional):
                keyword arguments:

                bounds (2-tuple of float or 2-tuple of 1D array_like of float):
                    list of pairs (xMin, xMax) limiting x

                ... specific optimizer options

        Returns:
            (dict {str: float or int or str}):
                results, see Model.train()

        """
        results = self.initResults('method', method)
        self.weights = None  # required by Model.predict()
        self.ready = True  # required by Model.predict()

        if method in self.scipyMinimizers:
            if method.startswith('bas'):
                nItMax = kwargs.get('nItMax', 100)

                res = scipy.optimize.basinhopping(func=self.meanSquareErrror,
                                                  x0=c0,
                                                  niter=nItMax,
                                                  T=1.0,
                                                  stepsize=0.5,
                                                  minimizer_kwargs=None,
                                                  take_step=None,
                                                  accept_test=None,
                                                  callback=None,
                                                  interval=50,
                                                  disp=False,
                                                  niter_success=None)
                if 'success' in res.message[0]:
                    results['weights'] = np.atleast_1d(res.x)
                    results['iterations'] = res.nit
                    results['evaluations'] = res.nfev
                else:
                    self.write('\n??? ', method, ': ', res.message)

            elif method.startswith('dif'):
                nItMax = kwargs.get('nItMax', 1000)  # scipy's default maxiter

                res = scipy.optimize.differential_evolution(
                    func=self.meanSquareErrror,
                    bounds=[[-10, 10]] * c0.size,
                    strategy='best1bin',
                    maxiter=nItMax,
                    popsize=15,
                    tol=0.01,
                    mutation=(0.5, 1),
                    recombination=0.7,
                    seed=None,
                    disp=False,
                    polish=True,
                    init='latinhypercube')
                if res.success:
                    results['weights'] = np.atleast_1d(res.x)
                    results['iterations'] = res.nit
                    results['evaluations'] = res.nfev
                else:
                    self.write('\n??? ', method, ': ', res.message)
            else:
                validKeys = ['nItMax', 'adaptive', 'goal']
                kw = {}
                if any(k in kwargs for k in validKeys):
                    kw['options'] = {}
                    kw['options']['maxiter'] = kwargs.get('nItMax', None)
                    if method == 'Nelder-Mead':
                        kw['options']['xatol'] = kwargs.get('goal', 1e-4)
                try:
                    res = scipy.optimize.minimize(fun=self.meanSquareErrror,
                                                  x0=c0,
                                                  method=method,
                                                  **kw)
                    if res.success:
                        results['weights'] = np.atleast_1d(res.x)
                        results['iterations'] = res.nit \
                            if method != 'COBYLA' else -1
                        results['evaluations'] = res.nfev
                    else:
                        self.write('\n??? ', method, ': ', res.message)
                except scipy.optimize.OptimizeWarning as warn:
                    # `res` may be unbound here; report the warning itself
                    results['weights'] = None
                    self.write('\n??? ', method, ': ', str(warn))

        elif method in self.scipyRootFinders:
            nItMax = kwargs.get('nItMax', 0)

            if method.startswith('lm'):
                res = scipy.optimize.root(
                    fun=self.difference,
                    x0=c0,
                    args=(),
                    method='lm',
                    jac=None,
                    tol=None,
                    callback=None,
                    options={
                        # Note: passing 'func' here fails with
                        # "_root_leastsq() got multiple values for argument 'func'"
                        'col_deriv': 0,
                        'xtol': 1.49012e-08,
                        'ftol': 1.49012e-8,
                        'gtol': 0.,
                        'maxiter': nItMax,
                        'eps': 0.0,
                        'factor': 100,
                        'diag': None
                    })
                if res.success:
                    results['weights'] = np.atleast_1d(res.x)
                    results['iterations'] = -1
                    results['evaluations'] = res.nfev
                else:
                    self.write('\n??? ', method, ': ', res.message)
            else:
                print("\n??? method:'" + str(method) + "' not implemented")

        elif method in self.scipyEquationMinimizers:
            if method.startswith('leastsq'):
                x, cov_x, infodict, mesg, ier = scipy.optimize.leastsq(
                    self.difference, c0, full_output=True)
                if ier in [1, 2, 3, 4]:
                    results['weights'] = np.atleast_1d(x)
                    results['iterations'] = -1
                    results['evaluations'] = infodict['nfev']
                else:
                    self.write('\n??? ', method, ': ', mesg)

            elif method == 'least_squares':
                res = scipy.optimize.least_squares(self.difference, c0)
                if res.success:
                    results['weights'] = np.atleast_1d(res.x)
                    results['iterations'] = -1
                    results['evaluations'] = res.nfev
                else:
                    self.write('\n??? ', method, ': ', res.message)

        elif method in self.geneticMinimizers:
            if 'modestga' in sys.modules and method == 'ga':

                validKeys = ['tol', 'options', 'bounds']
                # see scipy's minimize
                kw = {k: kwargs[k] for k in validKeys if k in kwargs}
                res = mg.minimize(
                    fun=self.meanSquareErrror,
                    x0=c0,
                    # TODO method=method,
                    **kw)
                if True:  # TODO res.success:
                    results['weights'] = np.atleast_1d(res.x)
                    results['iterations'] = -1  # res.nit
                    results['evaluations'] = res.ng  # TODO res.nfev =? ng
                else:
                    self.write('\n??? ', method, ': ', res.message)
        else:
            assert 0, '??? LightGray, invalid method: ' + str(method)

        self.weights = results['weights']
        self.ready = self.weights is not None

        return results
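
To make the objective concrete: a self-contained example of the mean-square-error function this method minimizes, with an illustrative linear model and synthetic data (names are not from the library):

import numpy as np
import scipy.optimize

def f(x, c):
    # Illustrative model: straight line with parameters c
    return c[0] + c[1] * x

X = np.linspace(0.0, 1.0, 20)
Y = 2.0 + 3.0 * X  # synthetic target data

def meanSquareError(c):
    # sum((f(X) - Y)^2) / X.size, as in the docstring above
    return np.sum((f(X, c) - Y) ** 2) / X.size

res = scipy.optimize.minimize(fun=meanSquareError, x0=[1.0, 1.0],
                              method='Nelder-Mead')
print(res.x)  # fitted parameters, close to [2, 3]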
Example No. 6
import numpy as np
import matplotlib.pyplot as plt
from modestga import minimize

N = 50  # problem dimension (value assumed; the truncated snippet only uses N)


def fun(x, *args):
    # Standard Rastrigin function (the usual bounds are +/-5.12, as below)
    return 10.0 * x.size + np.sum(x ** 2 - 10.0 * np.cos(2.0 * np.pi * x))


def callback(x, fx, ng, *args):
    # Called after each generation; save the solution history
    x_hist = args[0]
    fx_hist = args[1]
    x_hist.append(x)
    fx_hist.append(fx)


bounds = [(-5.12, 5.12) for i in range(N)]

x_hist = list()
fx_hist = list()
args = (x_hist, fx_hist)

options = {
    'generations': 500,
}

res = minimize(fun, bounds, args=args, callback=callback, options=options)

# Print optimization result
print(res)

if __name__ == "__main__":
    # Plot solution history
    fig, ax = plt.subplots(2, 1, sharex=True)
    ax[0].plot(x_hist, alpha=0.1, color='k')
    ax[0].set_title('x')
    ax[1].plot(fx_hist, color='k')
    ax[1].set_title('f(x) = RASTRIGIN FUNC.')
    ax[1].set_xlabel('Generation')

    plt.show()