# Example 1
# Example 1: compare golden-section search and scalar gradient descent when
# minimizing the error of a 1-D MCP (phase-change material) simulation.
from minimization import minimize_scalar

# Sweep grid for the heat-transfer coefficient h (presumably used for
# plotting the error curve further down — not used in this excerpt).
hvec = np.linspace(50, 300, 50)
err = np.zeros_like(hvec)
xmin, xmax = 50, 300
xinit = 0.5 * (xmin + xmax)  # start gradient descent at the interval midpoint

# Project-local 1-D simulation: fsim runs the model (passed as precallfunc),
# errfunc scores the resulting solution.
simPCM1D = simulationMCP_1D(15)
fsim = simPCM1D.solveEquation
errfunc = simPCM1D.sqrtError

# Derivative-free golden-section search over [xmin, xmax], timed.
start = time.time()
dictgold = minimize_scalar(errfunc,
                           xmin,
                           xmax,
                           returnDict=True,
                           precallfunc=fsim,
                           tol=5e-3,
                           method="goldenSearch")
inter = time.time()
# Scalar gradient descent with finite-difference derivatives, timed.
dictgrad = minimize_scalar(errfunc,
                           xmin,
                           xmax,
                           precallfunc=fsim,
                           method="scalarGradient",
                           xinit=xinit,
                           gtol=1e-2,
                           tol=5e-3,
                           returnDict=True,
                           deriveMethod="finite-difference",
                           dh=5e-3,
                           # NOTE(review): the call was truncated in the source
                           # chunk; closed following Example 2's identical call
                           # (maxIter=25) — confirm against the original file.
                           maxIter=25)
end = time.time()

# Minimizers found by each method (keys match the sibling examples).
xgold = dictgold["x"]
xgrad = dictgrad["x"]
# Example 2: same comparison on the 0-D MCP model, imported directly as the
# objective function.
from modeleMCP import simulationMCP_0D as fsim

import sys
plt.rc('font', family='Serif')

# Make the local minimization package importable before pulling it in.
sys.path.append("../../minimisation")
from minimization import minimize_scalar


# Sweep grid and search interval for h.
hvec = np.linspace(50, 300, 200)
err = np.zeros_like(hvec)
xmin, xmax = 50, 300
xinit = 0.5 * (xmin + xmax)  # midpoint start for the gradient method

# Run both solvers back-to-back, recording wall-clock boundaries so each
# stage's duration can be reported.
start = time.time()
dictgold = minimize_scalar(fsim, xmin, xmax,
                           returnDict=True,
                           tol=1e-6,
                           method="goldenSearch")
inter = time.time()
dictgrad = minimize_scalar(fsim, xmin, xmax,
                           method="scalarGradient",
                           xinit=xinit,
                           gtol=1e-3,
                           tol=1e-6,
                           returnDict=True,
                           deriveMethod="finite-difference",
                           dh=1e-3,
                           maxIter=25)
end = time.time()

# Minimizer found by each method.
xgold = dictgold["x"]
xgrad = dictgrad["x"]
# Example 3
# Example 3: constrained 1-D minimization of an oscillatory objective.
xmin, xmax = 3, 7
xinit = (xmin + xmax) / 2  # midpoint start for the gradient method

# Objective, inequality constraint (feasible where c(x) >= 0, i.e. on
# [4.5, 5.8]), and a quadratic-penalty variant of the objective.
f = lambda x: -np.sin(0.25 * x**1.2 + x) + np.sin(10 / 3 * x - 2 * np.pi / 3)
c = lambda x: (4.5 - x) * (x - 5.8)
r = 500  # penalty weight
# NOTE(review): fpenal is not passed to the solver below (constraints are
# used directly) — presumably plotted later; confirm before removing.
fpenal = lambda x: f(x) + r * np.minimum(c(x), 0)**2

# Dense grid for visualisation.
x = np.linspace(xmin, xmax, 250)
y = f(x)

constraints = [{"type": "ineq", 'fun': c}]

# Solve with both methods, timing each stage.
start = time.time()
dictgold = minimize_scalar(f, xmin, xmax,
                           returnDict=True,
                           constraints=constraints,
                           method='goldenSearch')
inter = time.time()
dictgrad = minimize_scalar(f, xmin, xmax,
                           returnDict=True,
                           constraints=constraints,
                           method='scalarGradient',
                           xinit=xinit)
end = time.time()

# Minimizer found by each method.
xgold = dictgold["x"]
xgrad = dictgrad["x"]
# Example 4
# Example 4: unconstrained minimization of a smooth concave-shaped objective
# on (0, 1).
sys.path.append("..")
from minimization import minimize_scalar

# FONCTION ET MINIMISATION
# Search interval kept strictly inside (0, 1) so both fractional powers are
# real-valued.
xmin, xmax = 0.001, 0.99
xinit = (xmax + xmin) / 2  # midpoint start for the gradient method

f = lambda x: -x**(2 / 3) - (1 - x**2)**(1 / 3)
# Dense grid for visualisation.
x = np.linspace(xmin, xmax, 250)
y = f(x)

# Run both solvers, recording wall-clock boundaries.
start = time.time()
dictgold = minimize_scalar(f, xmin, xmax,
                           returnDict=True,
                           method='goldenSearch')
inter = time.time()
dictgrad = minimize_scalar(f, xmin, xmax,
                           returnDict=True,
                           method="scalarGradient",
                           xinit=xinit)
end = time.time()

# Minimizer found by each method, plus the golden-search objective value.
xgold = dictgold["x"]
xgrad = dictgrad["x"]

fgold = dictgold['fmin']
# Example 5
# Example 5: multimodal 1-D objective with an analytic derivative supplied
# to the gradient method.
sys.path.append("..")
from minimization import minimize_scalar

# FONCTION ET MINIMISATION
xmin, xmax = 2.7, 7.5
xinit = (xmax + xmin) / 2  # midpoint start for the gradient method

f = lambda x: np.sin(x) * x + np.sin(10 / 3 * x)
# Exact derivative of f (product rule on sin(x)*x, chain rule on the rest).
df = lambda x: np.cos(x) * x + np.sin(x) + 10 / 3 * np.cos(10 / 3 * x)
# Dense grid for visualisation.
x = np.linspace(xmin, xmax, 250)
y = f(x)

# Golden-section search (derivative-free) then gradient descent using the
# analytic derivative, timing each stage.
start = time.time()
dictgold = minimize_scalar(f, xmin, xmax,
                           returnDict=True,
                           method="goldenSearch")
inter = time.time()
dictgrad = minimize_scalar(f, xmin, xmax,
                           xinit=xinit,
                           returnDict=True,
                           gf=df,
                           method="scalarGradient")
end = time.time()

# Minimizer found by each method.
xgold = dictgold["x"]
xgrad = dictgrad["x"]