Example #1
    def optimizeLambda(self, marginal, deltaRHS):
        """
        Compute the lambda values

        Parameters
        ----------
        marginal : int
            The index of the perturbed marginal.
        deltaRHS : sequence of float of dimension 2
            The values of the mean and of the variance + mean^2.
        """

        # define the objective function as the Lagrangian H,
        # using its analytical gradient and Hessian
        optimFunc = ot.PythonFunction(2, 1, lambda lamb: [self.H(marginal, lamb, deltaRHS)],
                            gradient=lambda lamb: self.gradH(marginal, lamb, deltaRHS),
                            hessian=lambda lamb: self.hessianH(marginal, lamb))

        # define the optimization problem
        optimPb = ot.OptimizationProblem(optimFunc,
                                         ot.NumericalMathFunction(),
                                         ot.NumericalMathFunction(),
                                         ot.Interval())

        # solve the problem using SLSQP from NLopt
        optim = ot.NLopt(optimPb, 'LD_SLSQP')
        optim.setStartingPoint([0, 0])
        optim.run()
        # return the lambda values, solution of the problem
        return optim.getResult().getOptimalPoint()
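A minimal, self-contained sketch of the same pattern (an `ot.PythonFunction` built with an analytic gradient and minimized by NLopt's SLSQP); the toy objective and all names below are illustrative, not part of the class above:

import openturns as ot

def toy(x):
    # quadratic bowl with minimum at (1, -0.5)
    return [(x[0] - 1.0) ** 2 + (x[1] + 0.5) ** 2]

def toyGradient(x):
    # the gradient callback returns an (input dim) x (output dim) matrix
    return [[2.0 * (x[0] - 1.0)], [2.0 * (x[1] + 0.5)]]

toyFunc = ot.PythonFunction(2, 1, toy, gradient=toyGradient)
toyProblem = ot.OptimizationProblem(toyFunc)
toyAlgo = ot.NLopt(toyProblem, 'LD_SLSQP')
toyAlgo.setStartingPoint([0.0, 0.0])
toyAlgo.run()
print(toyAlgo.getResult().getOptimalPoint())  # close to [1.0, -0.5]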
# %%
# Solving the problem with NLopt
# ------------------------------
#
# We see that the `Cobyla` algorithm required many function evaluations. This is why we now use the `NLopt` class with the LBFGS algorithm. However, the algorithm may explore input points far away from the input domain we used so far. This is why we add bounds to the problem, so that the algorithm never goes too far away from the valley.

# %%
bounds = ot.Interval(lowerbound, upperbound)

# %%
problem = ot.OptimizationProblem(rosenbrock)
problem.setBounds(bounds)

# %%
algo = ot.NLopt(problem, 'LD_LBFGS')
algo.setStartingPoint(x0)
algo.run()

# %%
result = algo.getResult()

# %%
xoptim = result.getOptimalPoint()
xoptim

# %%
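# Compute the distance between the computed optimum and the exact solution.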
delta = xexact - xoptim
absoluteError = delta.norm()
absoluteError
# Calibration of the default optimizer
# (assumed imports for this standalone excerpt)
import openturns as ot
import openturns.testing as ott
ot.ResourceMap.SetAsScalar(
    'GeneralLinearModelAlgorithm-DefaultOptimizationLowerBound', 1.0e-5)
ot.ResourceMap.SetAsScalar(
    'GeneralLinearModelAlgorithm-DefaultOptimizationUpperBound', 100)
# Data & estimation
inputDimension = 1
X = ot.Normal().getSample(100)
X = X.sortAccordingToAComponent(0)
covarianceModel = ot.SquaredExponential([1.0], [1.0])
model = ot.SymbolicFunction(["x"], ["x - 0.6 * cos(x/3)"])
Y = model(X)
basis = ot.QuadraticBasisFactory(inputDimension).build()
algo = ot.GeneralLinearModelAlgorithm(X, Y, covarianceModel, basis, True)
algo.setOptimizationAlgorithm(ot.NLopt('LN_NELDERMEAD'))
algo.run()

# perform an evaluation
result = algo.getResult()
metaModel = result.getMetaModel()
conditionalCovariance = result.getCovarianceModel()
residual = metaModel(X) - Y
ott.assert_almost_equal(residual.computeCenteredMoment(2),
                        [1.06e-05], 1e-5, 1e-5)
ott.assert_almost_equal(conditionalCovariance.getParameter(),
                        [0.619144, 0.000937], 5e-3, 1e-3)
likelihood = algo.getObjectiveFunction()
assert likelihood.getInputDimension() == 1, "likelihood dim"
print("ok")
Example #4
# Assumed definitions for this excerpt (not shown in the original snippet):
# a linear objective in dimension 4.
import openturns as ot
dim = 4
linear = ot.NumericalMathFunction(
    ['x1', 'x2', 'x3', 'x4'], ['y'], ['x1 + 2*x2 - 3*x3 + 4*x4'])
startingPoint = [0.0] * dim

bounds = ot.Interval([-3.] * dim, [5.] * dim)
algoNames = ot.NLopt.GetAlgorithmNames()

for algoName in algoNames:

    # STOGO might not be enabled
    # NEWUOA nan/-nan
    # COBYLA crashes on squeeze
    # ESCH not same results with 2.4.1
    if any(tag in algoName for tag in ('STOGO', 'NEWUOA', 'COBYLA', 'ESCH')):
        print('-- Skipped: algo=', algoName)
        continue

    algo = ot.NLopt(algoName)

    for minimization in [True, False]:
        for inequality in [True, False]:
            for equality in [True, False]:
                problem = ot.OptimizationProblem(
                    linear, ot.NumericalMathFunction(), ot.NumericalMathFunction(), bounds)
                problem.setMinimization(minimization)
                if inequality:
                    # x3 <= x1
                    problem.setInequalityConstraint(ot.NumericalMathFunction(
                        ['x1', 'x2', 'x3', 'x4'], ['ineq'], ['x1-x3']))
                if equality:
                    # x4 = 2
                    problem.setEqualityConstraint(ot.NumericalMathFunction(
                        ['x1', 'x2', 'x3', 'x4'], ['eq'], ['x4-2']))
                # (assumed continuation of the excerpt: solve and report)
                algo.setProblem(problem)
                algo.setStartingPoint(startingPoint)
                algo.run()
                print(algo.getResult().getOptimalPoint())
Example #5
for algoName in algoNames:
    # STOGO/AGS might not be enabled
    if 'STOGO' in algoName or 'AGS' in algoName:
        continue

    # NEWUOA nan/-nan
    # COBYLA crashes on squeeze
    # ESCH not same results with 2.4.1
    # AUGLAG_EQ raises a roundoff-limited exception on i386
    # LD_SLSQP/LD_CCSAQ not same point on i386
    skipTags = ('NEWUOA', 'COBYLA', 'ESCH', 'AUGLAG_EQ', 'LD_SLSQP', 'LD_CCSAQ')
    if any(tag in algoName for tag in skipTags):
        print('-- Skipped: algo=', algoName)
        continue

    algo = ot.NLopt(algoName)

    for minimization in [True, False]:
        for inequality in [True, False]:
            for equality in [True, False]:
                for bound in [True, False]:

                    if not bound and 'LN_BOBYQA' in algoName:
                        continue

                    print('algo=', algoName, 'minimization=', minimization,
                          'bounds=', bound, 'inequality=', inequality,
                          'equality=', equality)
                    problem = ot.OptimizationProblem(f)
                    problem.setMinimization(minimization)
                    if inequality:
Example #6
    ot.ResourceMap.SetAsNumericalScalar(
        'GeneralizedLinearModelAlgorithm-DefaultOptimizationLowerBound',
        1.0e-5)
    ot.ResourceMap.SetAsNumericalScalar(
        'GeneralizedLinearModelAlgorithm-DefaultOptimizationUpperBound', 100)
    # Data & estimation
    spatialDimension = 1
    X = ot.Normal().getSample(100)
    X = X.sortAccordingToAComponent(0)
    covarianceModel = ot.SquaredExponential([1.0], [1.0])
    model = ot.NumericalMathFunction(["x"], ["x - 0.6 * cos(x/3)"])
    Y = model(X)
    basis = ot.QuadraticBasisFactory(spatialDimension).build()
    algo = ot.GeneralizedLinearModelAlgorithm(X, Y, covarianceModel, basis,
                                              True)
    algo.setOptimizationSolver(ot.NLopt('LN_NELDERMEAD'))
    algo.run()

    # perform an evaluation
    result = algo.getResult()
    metaModel = result.getMetaModel()
    conditionalCovariance = result.getCovarianceModel()
    residual = metaModel(X) - Y
    assert_almost_equal(residual.computeCenteredMoment(2), [1.06e-05], 1e-5,
                        1e-5)
    assert_almost_equal(conditionalCovariance.getParameter(),
                        [0.702138, 0.00137], 5e-3, 1e-3)
    print("Test Ok")

except:
    import sys
Example #7
        print('OK')
    except:
        print('no')

    # check that hmat library was found
    print('7: HMatrix (hmat-oss)'.ljust(width), end=' ')
    if ot.HMatrixFactory.IsAvailable():
        print('OK')
    else:
        print('no')

    # check that nlopt library was found
    print('8: optimization (NLopt)'.ljust(width), end=' ')
    try:
        problem = ot.OptimizationProblem()
        algo = ot.NLopt('LD_SLSQP')
        algo.setProblem(problem)
        print('OK')
    except:
        print('no')

    # check that TBB library was found
    print('9: multithreading (TBB)'.ljust(width), end=' ')
    if ot.TBB.IsAvailable():
        print('OK')
    else:
        print('no')

except:
    import os
    import traceback
Example #8
    ['x1', 'x2'], ['(x1 - 0.5)^2 + x2^2 - 4.0', '(x1 + 0.5)^2 + x2^2 - 4.0'])

# This is the perturbation function
noise = ot.SymbolicFunction(['x1', 'x2', 'xi1', 'xi2'],
                            ['x1 + xi1', 'x2 + xi2'])

# This is capital J: J(x,xi) = calJ(x+xi), the parametric objective function
JFull = ot.ComposedFunction(calJ, noise)
J = ot.ParametricFunction(JFull, [2, 3], [0.0] * 2)

# This is g, the parametric constraint function: g(x,xi) = calG(x+xi)
gFull = ot.ComposedFunction(calG, noise)
g = ot.ParametricFunction(gFull, [2, 3], [0.0] * 2)

bounds = ot.Interval([-3.0] * 2, [3.0] * 2)
solver = ot.NLopt('LD_SLSQP')
solver.setMaximumIterationNumber(100)

for sigma_xi in [0.1, 0.2, 0.3, 0.4, 0.5]:

    thetaDist = ot.Normal([0.0] * 2, [sigma_xi] * 2, ot.IdentityMatrix(2))
    robustnessMeasure = otrobopt.MeanMeasure(J, thetaDist)
    reliabilityMeasure = otrobopt.JointChanceMeasure(g, thetaDist, ot.Less(),
                                                     0.9)
    problem = otrobopt.RobustOptimizationProblem(robustnessMeasure,
                                                 reliabilityMeasure)
    problem.setBounds(bounds)

    algo = otrobopt.SequentialMonteCarloRobustAlgorithm(problem, solver)
    algo.setMaximumIterationNumber(11)
    algo.setMaximumAbsoluteError(1e-6)
    # (assumed continuation of the excerpt: run and inspect the result)
    algo.run()
    result = algo.getResult()
    print('sigma_xi=', sigma_xi, 'optimum=', result.getOptimalPoint())
printCovarianceParameterChange(covarianceModel, notUpdatedCovarianceModel)

# %%
# We see that the parameters did not change *at all*: disabling the optimization allows one to keep a constant covariance model. In a practical algorithm, we may, for example, add a block of 10 new points before updating the parameters of the covariance model. At this point, we may reuse the previous covariance model so that the optimization starts from a better point than the parameters' default values. This reduces the cost of the optimization.
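
# %%
# For example, a minimal sketch of this warm start, reusing objects defined
# earlier in this example: passing the previously fitted covariance model makes
# its current parameters the starting point of the new optimization.

# %%
new_algo = ot.KrigingAlgorithm(X_train, Y_train, notUpdatedCovarianceModel, basis)
new_algo.run()  # the optimization starts from the reused parameter values
print(new_algo.getResult().getCovarianceModel())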

# %%
# Configure the local optimization solver
# ---------------------------------------

# %%
# The following example shows how to set the local optimization solver.
# We choose the SLSQP algorithm from NLopt.

# %%
problem = solver.getProblem()
local_solver = ot.NLopt(problem, "LD_SLSQP")
covarianceModel = ot.SquaredExponential([1.0] * dimension, [1.0])
covarianceModel.setScale(maximum_scale_bounds)  # Trick A
algo = ot.KrigingAlgorithm(X_train, Y_train, covarianceModel, basis)
algo.setOptimizationBounds(scaleOptimizationBounds)  # Trick B
algo.setOptimizationAlgorithm(local_solver)
algo.run()

# %%
result = algo.getResult()
finetune_covariance_model = result.getCovarianceModel()
print(finetune_covariance_model)

# %%
printCovarianceParameterChange(finetune_covariance_model,
                               basic_covariance_model)