Example no. 1
0
    def optimize(self, optName, optOptions=None, storeHistory=False, places=5):
        """Solve the HS071 constrained problem with ``optName`` and verify the optimum.

        Parameters
        ----------
        optName : str
            Name of the pyOptSparse optimizer to instantiate via ``OPT``.
        optOptions : dict, optional
            Optimizer-specific options; defaults to an empty dict.
        storeHistory : bool, optional
            Unused here; kept for signature compatibility with sibling tests.
        places : int, optional
            Decimal places for the ``assertAlmostEqual`` checks.

        Raises
        ------
        unittest.SkipTest
            If the requested optimizer cannot be instantiated.
        """
        # None sentinel instead of a mutable default dict shared across calls.
        if optOptions is None:
            optOptions = {}

        # Optimization Object
        optProb = Optimization('HS071 Constraint Problem', objfunc)

        # Design Variables
        x0 = [1.0, 5.0, 5.0, 1.0]
        optProb.addVarGroup('xvars', 4, lower=1, upper=5, value=x0)

        # Constraints
        optProb.addConGroup('con', 2, lower=[25, 40], upper=[None, 40])

        # Objective
        optProb.addObj('obj')

        # Optimizer (narrowed from a bare except; skip when unavailable)
        try:
            opt = OPT(optName, options=optOptions)
        except Exception:
            raise unittest.SkipTest('Optimizer not available:', optName)

        sol = opt(optProb, sens=sens)

        # Check Solution against the known HS071 optimum.
        self.assertAlmostEqual(sol.objectives['obj'].value, 17.0140172, places=places)

        self.assertAlmostEqual(sol.variables['xvars'][0].value, 1.0, places=places)
        self.assertAlmostEqual(sol.variables['xvars'][1].value, 4.743, places=places)
        self.assertAlmostEqual(sol.variables['xvars'][2].value, 3.82115, places=places)
        self.assertAlmostEqual(sol.variables['xvars'][3].value, 1.37941, places=places)

        # Lagrange multipliers are only reported by some optimizers.
        if hasattr(sol, 'lambdaStar'):
            self.assertAlmostEqual(sol.lambdaStar['con'][0], 0.55229366, places=places)
            self.assertAlmostEqual(sol.lambdaStar['con'][1], -0.16146857, places=places)
    def test_sens(self):
        """SNOPT stops after ``max_sens`` gradient calls and reports exit code 71."""
        comp = TerminateComp(max_sens=3)
        optProb = Optimization("Paraboloid", comp.objfunc)

        for dv in ("x", "y"):
            optProb.addVarGroup(dv, 1, type="c", lower=-50.0, upper=50.0, value=0.0)
        optProb.finalizeDesignVariables()

        optProb.addObj("obj")

        optProb.addConGroup("con", 1, lower=-15.0, upper=-15.0, wrt=["x", "y"], linear=True, jac=con_jac)

        test_name = "SNOPT_user_termination_sens"
        snopt_opts = {
            "Print file": f"{test_name}.out",
            "Summary file": f"{test_name}_summary.out",
        }
        try:
            opt = SNOPT(options=snopt_opts)
        except Error:
            raise unittest.SkipTest("Optimizer not available: SNOPT")

        sol = opt(optProb, sens=comp.sens)

        # One extra evaluation happens before the termination flag is honored.
        self.assertEqual(comp.sens_count, 4)

        # Exit code for user requested termination.
        self.assertEqual(sol.optInform["value"], 71)
Example no. 3
0
    def optimize(self, optName, tol, optOptions=None, storeHistory=False, hotStart=None):
        """Solve HS15 with ``optName`` and verify against either known local optimum.

        Parameters
        ----------
        optName : str
            pyOptSparse optimizer name, instantiated via ``OPT``.
        tol : float
            Absolute/relative tolerance for ``assert_allclose`` checks.
        optOptions : dict, optional
            Optimizer options; defaults to an empty dict.
        storeHistory : bool or str, optional
            True derives a history filename from ``optName``; a string is used
            as the history path; anything else disables history storage.
        hotStart : str, optional
            History file to hot-start from.
        """
        # None sentinel instead of a mutable default dict shared across calls.
        optOptions = {} if optOptions is None else optOptions

        self.nf = 0  # number of function evaluations
        self.ng = 0  # number of gradient evaluations
        # Optimization Object
        optProb = Optimization("HS15 Constraint Problem", self.objfunc)

        # Design Variables
        lower = [-5.0, -5.0]
        upper = [0.5, 5.0]
        value = [-2, 1.0]
        optProb.addVarGroup("xvars", 2, lower=lower, upper=upper, value=value)

        # Constraints
        lower = [1.0, 0.0]
        upper = [None, None]
        optProb.addConGroup("con", 2, lower=lower, upper=upper)

        # Objective
        optProb.addObj("obj")

        # Check optimization problem:
        print(optProb)

        # Optimizer
        try:
            opt = OPT(optName, options=optOptions)
        except Error:
            raise unittest.SkipTest("Optimizer not available:", optName)

        # Solution history. The old branching left self.histFileName unset when
        # storeHistory was False (the default), crashing on the opt() call below.
        if storeHistory is True:
            self.histFileName = "%s_hs015_Hist.hst" % (optName.lower())
        elif isinstance(storeHistory, str):
            self.histFileName = storeHistory
        else:
            self.histFileName = None

        sol = opt(optProb, sens=self.sens, storeHistory=self.histFileName, hotStart=hotStart)

        # Test printing solution to screen
        print(sol)

        # Check Solution: HS15 has two known optima.
        self.fStar1 = 306.5
        self.fStar2 = 360.379767

        self.xStar1 = (0.5, 2.0)
        self.xStar2 = (-0.79212322, -1.26242985)

        dv = sol.getDVs()
        sol_xvars = [sol.variables["xvars"][i].value for i in range(2)]
        assert_allclose(sol_xvars, dv["xvars"], atol=tol, rtol=tol)
        # we check either optimum via try/except
        try:
            assert_allclose(sol.objectives["obj"].value, self.fStar1, atol=tol, rtol=tol)
            assert_allclose(dv["xvars"], self.xStar1, atol=tol, rtol=tol)
        except AssertionError:
            assert_allclose(sol.objectives["obj"].value, self.fStar2, atol=tol, rtol=tol)
            assert_allclose(dv["xvars"], self.xStar2, atol=tol, rtol=tol)
Example no. 4
0
    def test_opt_bug1(self):
        # Due to a new feature, there is a TypeError when you optimize a model without a constraint.
        optProb = Optimization("Paraboloid", objfunc_no_con)

        # Design variables: two continuous scalars, no constraints on purpose.
        for dv in ("x", "y"):
            optProb.addVarGroup(dv, 1, varType="c", lower=-50.0, upper=50.0, value=0.0)

        # Objective
        optProb.addObj("obj")

        test_name = "bugfix_SNOPT_bug1"
        snopt_opts = {
            "Major feasibility tolerance": 1e-1,
            "Print file": f"{test_name}.out",
            "Summary file": f"{test_name}_summary.out",
        }

        # Optimizer; skip only when SNOPT itself failed to import.
        try:
            opt = SNOPT(options=snopt_opts)
        except Error as err:
            if "There was an error importing" in err.message:
                raise unittest.SkipTest("Optimizer not available: SNOPT")
            raise err

        opt(optProb, sens=sens)
Example no. 5
0
    def test_opt(self):
        """Minimize the paraboloid under one linear equality constraint via SNOPT."""
        optProb = Optimization("Paraboloid", objfunc)

        # Two continuous design variables.
        for dv in ("x", "y"):
            optProb.addVarGroup(dv, 1, varType="c", lower=-50.0, upper=50.0, value=0.0)

        # Objective
        optProb.addObj("obj")

        # Equality Constraint
        optProb.addConGroup("con", 1, lower=-15.0, upper=-15.0, wrt=["x", "y"], linear=True, jac=con_jac)

        # Check optimization problem:
        print(optProb)
        test_name = "bugfix_SNOPT_test_opt"
        snopt_opts = {
            "Major feasibility tolerance": 1e-1,
            "Print file": f"{test_name}.out",
            "Summary file": f"{test_name}_summary.out",
        }

        # Optimizer
        try:
            opt = SNOPT(options=snopt_opts)
        except Error:
            raise unittest.SkipTest("Optimizer not available: SNOPT")

        sol = opt(optProb, sens=sens)

        # Check Solution 7.166667, -7.833334
        tol = 1e-6
        assert_allclose(sol.variables["x"][0].value, 7.166667, atol=tol, rtol=tol)
        assert_allclose(sol.variables["y"][0].value, -7.833333, atol=tol, rtol=tol)
    def test_opt(self):
        """Run NSGA2 on the Rosenbrock function and check convergence to (1, 1)."""
        optProb = Optimization("Rosenbrock function", objfunc)
        for dv in ("x", "y"):
            optProb.addVar(dv, "c", value=0, lower=-600, upper=600)

        optProb.addObj("obj1")
        optProb.addObj("obj2")

        # 300 generations will find x=(0,0), 200 or less will find x=(1,1)
        nsga_options = {"maxGen": 200}

        # Optimizer
        try:
            opt = NSGA2(options=nsga_options)
        except Error:
            raise unittest.SkipTest("Optimizer not available:", "NSGA2")

        sol = opt(optProb)

        # Check Solution
        tol = 1e-2
        assert_allclose(sol.variables["x"][0].value, 1.0, atol=tol, rtol=tol)

        assert_allclose(sol.variables["y"][0].value, 1.0, atol=tol, rtol=tol)
def large_sparse(optimizer="SNOPT", optOptions=None):
    """Build the large-and-sparse test problem and return (optimizer, problem)."""
    options_dict = {} if optOptions is None else optOptions

    # Optimization Object
    prob = Optimization("large and sparse", objfunc)

    # Design variables: one scalar plus two large groups with index-dependent bounds.
    prob.addVar("x", lower=-100, upper=150, value=0)
    prob.addVarGroup("y", N, lower=-10 - arange(N), upper=arange(N), value=0)
    prob.addVarGroup("z", 2 * N, upper=arange(2 * N), lower=-100 - arange(2 * N), value=0)

    # Constraints: three scalar constraints plus one large linear group with
    # an explicit sparse Jacobian.
    prob.addCon("con1", upper=100, wrt=["x"])
    prob.addCon("con2", upper=100)
    prob.addCon("con3", lower=4, wrt=["x", "z"])
    lin_jac = {"x": np.ones((N, 1)), "y": sparse.spdiags(np.ones(N), 0, N, N)}
    prob.addConGroup(
        "lincon",
        N,
        lower=2 - 3 * arange(N),
        linear=True,
        wrt=["x", "y"],
        jac=lin_jac,
    )
    prob.addObj("obj")

    # Optimizer
    built_opt = OPT(optimizer, options=options_dict)
    prob.printSparsity()

    return built_opt, prob
Example no. 8
0
    def test_autorefine(self):
        """Solve TP109 globally with ALPSO, then refine the result with SNOPT."""
        optProb = Optimization("TP109 Constraint Problem", objfunc)

        # Design Variables (Removed infinite bounds for ALPSO)
        lower = [0.0, 0.0, -0.55, -0.55, 196, 196, 196, -400, -400]
        upper = [2000, 2000, 0.55, 0.55, 252, 252, 252, 800, 800]
        value = [0] * 9
        optProb.addVarGroup("xvars", 9, lower=lower, upper=upper, value=value)

        # Constraints: the last two become explicit linear constraints below
        # when USE_LINEAR is set, otherwise they stay in this group.
        lower = [0, 0, 0, 0, 0, 0, 0, 0]
        upper = [None, None, 0, 0, 0, 0, 0, 0]
        if not USE_LINEAR:
            lower += [0, 0]
            upper += [None, None]

        optProb.addConGroup("con", len(lower), lower=lower, upper=upper)

        # And the 2 linear constriants
        if USE_LINEAR:
            jac = np.zeros((1, 9))
            jac[0, 3] = 1.0
            jac[0, 2] = -1.0
            optProb.addConGroup(
                "lin_con", 1, lower=-0.55, upper=0.55, wrt=["xvars"], jac={"xvars": jac}, linear=True
            )

        # Objective
        optProb.addObj("obj")

        # Global Optimizer: ALPSO
        try:
            opt1 = OPT("ALPSO")
        except Error:
            raise unittest.SkipTest("Optimizer not available:", "ALPSO")

        # Get first Solution
        sol1 = opt1(optProb)

        # Now run the previous solution with SNOPT
        try:
            opt2 = OPT("SNOPT")
        except Error:
            raise unittest.SkipTest("Optimizer not available:", "SNOPT")

        sol2 = opt2(sol1)

        # Check Solution
        assert_allclose(sol2.objectives["obj"].value, 0.536206927538e04, atol=1e-2, rtol=1e-2)
Example no. 9
0
    def test_opt(self):
        """Minimize the paraboloid under one linear equality constraint with SNOPT.

        Raises
        ------
        unittest.SkipTest
            If SNOPT cannot be instantiated.
        """
        # Optimization Object
        optProb = Optimization('Paraboloid', objfunc)

        # Design Variables
        optProb.addVarGroup('x', 1, type='c', lower=-50.0, upper=50.0, value=0.0)
        optProb.addVarGroup('y', 1, type='c', lower=-50.0, upper=50.0, value=0.0)
        optProb.finalizeDesignVariables()

        # Objective
        optProb.addObj('obj')

        # Equality Constraint
        optProb.addConGroup('con', 1, lower=-15.0, upper=-15.0, wrt=['x', 'y'], linear=True, jac=con_jac)

        # Check optimization problem:
        print(optProb)

        # Optimizer (narrowed from a bare except, which also swallowed
        # KeyboardInterrupt/SystemExit)
        try:
            opt = SNOPT(optOptions={'Major feasibility tolerance': 1e-1})
        except Exception:
            raise unittest.SkipTest('Optimizer not available: SNOPT')

        sol = opt(optProb, sens=sens)

        # Check Solution 7.166667, -7.833334
        self.assertAlmostEqual(sol.variables['x'][0].value, 7.166667, places=6)
        self.assertAlmostEqual(sol.variables['y'][0].value, -7.833333, places=6)
Example no. 10
0
    def optimize(self, optName, tol, optOptions=None, storeHistory=False):
        """Solve the large-and-sparse problem with ``optName`` and check the optimum.

        Parameters
        ----------
        optName : str
            pyOptSparse optimizer name, instantiated via ``OPT``.
        tol : float
            Tolerance for ``assert_allclose`` checks.
        optOptions : dict, optional
            Optimizer options; defaults to an empty dict.
        storeHistory : bool, optional
            Unused here; kept for signature compatibility with sibling tests.
        """
        # None sentinel instead of a mutable default dict shared across calls.
        optOptions = {} if optOptions is None else optOptions

        # Optimization Object
        optProb = Optimization("large and sparse", objfunc)

        # Design Variables
        optProb.addVar("x", lower=-100, upper=150, value=0)
        optProb.addVarGroup("y", N, lower=-10 - arange(N), upper=arange(N), value=0)
        optProb.addVarGroup("z", 2 * N, upper=arange(2 * N), lower=-100 - arange(2 * N), value=0)

        # Constraints
        optProb.addCon("con1", upper=100, wrt=["x"])
        optProb.addCon("con2", upper=100)
        optProb.addCon("con3", lower=4, wrt=["x", "z"])
        optProb.addConGroup(
            "lincon",
            N,
            lower=2 - 3 * arange(N),
            linear=True,
            wrt=["x", "y"],
            jac={"x": np.ones((N, 1)), "y": sparse.spdiags(np.ones(N), 0, N, N)},
        )
        optProb.addObj("obj")

        # Optimizer
        try:
            opt = OPT(optName, options=optOptions)
        except Error:
            raise unittest.SkipTest("Optimizer not available:", optName)

        sol = opt(optProb, sens=sens)

        # Check Solution
        assert_allclose(sol.objectives["obj"].value, 10.0, atol=tol, rtol=tol)

        assert_allclose(sol.variables["x"][0].value, 2.0, atol=tol, rtol=tol)
Example no. 11
0
    def setup_optProb(self, nObj=1, nDV=None, nCon=None, xScale=None, objScale=None, conScale=None, offset=None):
        """
        This function sets up a general optimization problem, with arbitrary
        DVs, constraints and objectives.
        Arbitrary scaling for the various parameters can also be specified.

        The list parameters default to ``None`` and are replaced with the
        historical single-group defaults below; mutable list defaults would
        be shared (and mutable) across calls.
        """
        # Replace None sentinels with the historical defaults.
        nDV = [4] if nDV is None else nDV
        nCon = [2] if nCon is None else nCon
        xScale = [1.0] if xScale is None else xScale
        objScale = [1.0] if objScale is None else objScale
        conScale = [1.0] if conScale is None else conScale
        offset = [0.0] if offset is None else offset

        self.nObj = nObj
        self.nDV = nDV
        self.nCon = nCon
        self.xScale = xScale
        self.objScale = objScale
        self.conScale = conScale
        self.offset = offset

        # Optimization Object
        self.optProb = Optimization("Configurable Test Problem", self.objfunc)
        self.x0 = {}
        # Design Variables: one group per entry of nDV, with random bounds
        # and a random feasible starting point.
        for iDV in range(len(nDV)):
            n = nDV[iDV]
            lower = np.random.uniform(-5, 2, n)
            upper = np.random.uniform(5, 20, n)
            x0 = np.random.uniform(lower, upper)
            dvName = "x{}".format(iDV)
            self.x0[dvName] = x0
            self.optProb.addVarGroup(
                dvName,
                n,
                lower=lower,
                upper=upper,
                value=x0,
                scale=xScale[iDV],
                offset=offset[iDV],
            )

        # Constraints: one group per entry of nCon, with random bounds.
        for iCon in range(len(nCon)):
            nc = nCon[iCon]
            lower = np.random.uniform(-5, 2, nc)
            upper = np.random.uniform(5, 6, nc)
            self.optProb.addConGroup(
                "con_{}".format(iCon),
                nc,
                lower=lower,
                upper=upper,
                scale=conScale[iCon],
            )

        # Objective
        for iObj in range(nObj):
            self.optProb.addObj("obj_{}".format(iObj), scale=objScale[iObj])

        # Finalize
        self.optProb.printSparsity()
        # run optimization
        # we don't care about outputs, but this performs optimizer-specific re-ordering
        # of constraints so we need this to test mappings
        opt = OPT("slsqp", options={"IFILE": "optProb_SLSQP.out"})
        opt(self.optProb, "FD")
Example no. 12
0
    def test_autorefine(self):
        """Solve TP109 globally with ALPSO, then refine that solution with SNOPT.

        Raises
        ------
        unittest.SkipTest
            If either ALPSO or SNOPT cannot be instantiated.
        """
        # Optimization Object
        optProb = Optimization('TP109 Constraint Problem', objfunc)

        # Design Variables (Removed infinite bounds for ALPSO)
        lower = [0.0, 0.0, -0.55, -0.55, 196, 196, 196, -400, -400]
        upper = [2000, 2000, 0.55, 0.55, 252, 252, 252, 800, 800]
        value = [0, 0, 0, 0, 0, 0, 0, 0, 0]
        optProb.addVarGroup('xvars', 9, lower=lower, upper=upper, value=value)

        # Constraints
        lower = [0, 0, 0, 0, 0, 0, 0, 0]
        upper = [None, None, 0, 0, 0, 0, 0, 0]
        if not USE_LINEAR:
            lower.extend([0, 0])
            upper.extend([None, None])

        optProb.addConGroup('con', len(lower), lower=lower, upper=upper)

        # And the 2 linear constriants
        if USE_LINEAR:
            jac = numpy.zeros((1, 9))
            jac[0, 3] = 1.0
            jac[0, 2] = -1.0
            optProb.addConGroup('lin_con',
                                1,
                                lower=-.55,
                                upper=0.55,
                                wrt=['xvars'],
                                jac={'xvars': jac},
                                linear=True)

        # Objective
        optProb.addObj('obj')

        # Global Optimizer: ALPSO (bare excepts narrowed to Exception so
        # KeyboardInterrupt/SystemExit still propagate)
        try:
            opt1 = OPT('ALPSO')
        except Exception:
            raise unittest.SkipTest('Optimizer not available:', 'ALPSO')

        # Get first Solution
        sol1 = opt1(optProb)

        # Now run the previous solution with SNOPT
        try:
            opt2 = OPT('SNOPT')
        except Exception:
            raise unittest.SkipTest('Optimizer not available:', 'SNOPT')

        sol2 = opt2(sol1)

        # Check Solution
        self.assertAlmostEqual(sol2.objectives['obj'].value,
                               0.536206927538e+04,
                               places=2)
Example no. 13
0
    def setup_optProb(self):
        """Build the two-variable, two-objective quadratic test problem."""
        self.optProb = Optimization("quadratic", self.objfunc)
        for dv in ("x", "y"):
            self.optProb.addVar(dv, value=0, lower=-600, upper=600)

        for obj in ("obj1", "obj2"):
            self.optProb.addObj(obj)
Example no. 14
0
    def optimize(self, optName, optOptions=None, storeHistory=False, places=5):
        """Solve HS15 with ``optName`` and check against either known optimum.

        Parameters
        ----------
        optName : str
            pyOptSparse optimizer name, instantiated via ``OPT``.
        optOptions : dict, optional
            Optimizer options; defaults to an empty dict.
        storeHistory : bool, optional
            If True, store optimization history in a file named after the optimizer.
        places : int, optional
            Decimal places for the ``assertAlmostEqual`` checks.
        """
        # None sentinel instead of a mutable default dict shared across calls.
        if optOptions is None:
            optOptions = {}

        # Optimization Object
        optProb = Optimization('HS15 Constraint Problem', self.objfunc)

        # Design Variables
        lower = [-5.0, -5.0]
        upper = [0.5, 5.0]
        value = [-2, 1.0]
        optProb.addVarGroup('xvars', 2, lower=lower, upper=upper, value=value)

        # Constraints
        lower = [1.0, 0.0]
        upper = [None, None]
        optProb.addConGroup('con', 2, lower=lower, upper=upper)

        # Objective
        optProb.addObj('obj')

        # Optimizer (narrowed from a bare except)
        try:
            opt = OPT(optName, options=optOptions)
        except Exception:
            raise unittest.SkipTest('Optimizer not available:', optName)

        # Solution
        if storeHistory:
            histFileName = '%s_hs015_Hist.hst' % (optName.lower())
        else:
            histFileName = None

        sol = opt(optProb, sens=self.sens, storeHistory=histFileName)

        # Test printing solution to screen
        print(sol)

        # Check Solution: accept whichever of the two known optima was found.
        fobj = sol.objectives['obj'].value
        diff = np.min(np.abs([fobj - 306.5, fobj - 360.379767]))
        self.assertAlmostEqual(diff, 0.0, places=places)

        xstar1 = (0.5, 2.0)
        xstar2 = (-0.79212322, -1.26242985)
        x1 = sol.variables['xvars'][0].value
        x2 = sol.variables['xvars'][1].value

        # getDVs() must agree with the variables stored on the solution.
        dv = sol.getDVs()
        self.assertAlmostEqual(x1, dv['xvars'][0], places=10)
        self.assertAlmostEqual(x2, dv['xvars'][1], places=10)

        diff = np.min(np.abs([xstar1[0] - x1, xstar2[0] - x1]))
        self.assertAlmostEqual(diff, 0.0, places=places)

        diff = np.min(np.abs([xstar1[1] - x2, xstar2[1] - x2]))
        self.assertAlmostEqual(diff, 0.0, places=places)
Example no. 15
0
    def optimize(self, optName, tol, optOptions=None, storeHistory=False, hotStart=None):
        """Solve the 4-D Rosenbrock problem with ``optName`` and verify the optimum.

        Parameters
        ----------
        optName : str
            pyOptSparse optimizer name, instantiated via ``OPT``.
        tol : float
            Tolerance for ``assert_allclose`` checks.
        optOptions : dict, optional
            Optimizer options; defaults to an empty dict.
        storeHistory : bool or str, optional
            True derives a history filename from ``optName``; a string is used
            as the history path; anything else disables history storage.
        hotStart : str, optional
            History file to hot-start from.
        """
        # None sentinel instead of a mutable default dict shared across calls.
        optOptions = {} if optOptions is None else optOptions

        self.nf = 0  # number of function evaluations
        self.ng = 0  # number of gradient evaluations
        # Optimization Object

        optProb = Optimization("Rosenbrock Problem", self.objfunc)

        n = 4  # Number of design variables
        np.random.seed(10)  # deterministic starting point across runs
        value = np.random.normal(size=n)

        lower = np.ones(n) * -50
        upper = np.ones(n) * 50
        optProb.addVarGroup("xvars", n, lower=lower, upper=upper, value=value)

        # Objective
        optProb.addObj("obj")

        # Check optimization problem:
        print(optProb)

        # Optimizer
        try:
            opt = OPT(optName, options=optOptions)
        except Error:
            raise unittest.SkipTest("Optimizer not available:", optName)

        # Solution history. The old branching left self.histFileName unset when
        # storeHistory was False (the default), crashing on the opt() call below.
        if storeHistory is True:
            self.histFileName = "%s_Rsbrk_Hist.hst" % (optName.lower())
        elif isinstance(storeHistory, str):
            self.histFileName = storeHistory
        else:
            self.histFileName = None

        sol = opt(optProb, sens=self.sens, storeHistory=self.histFileName, hotStart=hotStart)

        # Test printing solution to screen
        print(sol)

        # Check Solution: Rosenbrock optimum is all-ones with f = 0.
        self.fStar1 = 0.0

        self.xStar1 = np.ones(n)

        dv = sol.getDVs()
        sol_xvars = [sol.variables["xvars"][i].value for i in range(n)]

        assert_allclose(sol_xvars, dv["xvars"], atol=tol, rtol=tol)
        assert_allclose(dv["xvars"], self.xStar1, atol=tol, rtol=tol)
        # SNOPT 7.7.7 reports the objective differently; check fStar instead.
        if optName == "SNOPT" and opt.version != "7.7.7":
            assert_allclose(sol.objectives["obj"].value, self.fStar1, atol=tol, rtol=tol)
        else:
            assert_allclose(sol.fStar, self.fStar1, atol=tol, rtol=tol)
Example no. 16
0
def pyopt_truss(truss, optimizer='snopt', options=None):
    '''
    Take the given problem and optimize it with the given optimizer
    from the pyOptSparse library of optimizers.

    Parameters
    ----------
    truss : object
        Problem object providing evalObjCon, evalObjConGradient,
        getVarsAndBounds, and a `conn` member sizing the design vector.
    optimizer : str, optional
        Name of the pyOptSparse optimizer to use.
    options : dict, optional
        Optimizer-specific options; defaults to an empty dict.

    Returns
    -------
    tuple
        (opt, prob, sol); opt and sol are None if setup or solve failed.
    '''
    # None sentinel instead of a mutable default dict shared across calls.
    options = {} if options is None else options

    # Import the optimization problem
    from pyoptsparse import Optimization, OPT

    class pyOptWrapper:
        '''Adapter exposing the truss evaluation in pyOptSparse callback form.'''

        def __init__(self, truss):
            self.truss = truss

        def objcon(self, x):
            # Evaluate objective and constraints for the design vector x['x'].
            fail, obj, con = self.truss.evalObjCon(x['x'])
            funcs = {'objective': obj, 'con': con}
            return funcs, fail

        def gobjcon(self, x, funcs):
            # Evaluate gradients in place into g (objective) and A (constraint).
            g = np.zeros(x['x'].shape)
            A = np.zeros((1, x['x'].shape[0]))
            fail = self.truss.evalObjConGradient(x['x'], g, A)
            sens = {'objective': {'x': g}, 'con': {'x': A}}
            return sens, fail

    # Set the design variables
    wrap = pyOptWrapper(truss)
    prob = Optimization('Truss', wrap.objcon)

    # Determine the initial variable values and their lower/upper
    # bounds in the design problem
    n = len(truss.conn)
    x0 = np.zeros(n)
    lower = np.zeros(n)
    upper = np.zeros(n)
    truss.getVarsAndBounds(x0, lower, upper)

    # Set the variable bounds and initial values
    prob.addVarGroup('x', n, value=x0, lower=lower, upper=upper)

    # Set the constraints
    prob.addConGroup('con', 1, lower=0.0, upper=0.0)

    # Add the objective
    prob.addObj('objective')

    # Optimize the problem; deliberately best-effort, but narrowed from a
    # bare except so KeyboardInterrupt/SystemExit still propagate.
    try:
        opt = OPT(optimizer, options=options)
        sol = opt(prob, sens=wrap.gobjcon)
    except Exception:
        opt = None
        sol = None

    return opt, prob, sol
Example no. 17
0
    def optimize(
        self,
        optName,
        tol,
        optOptions=None,
        storeHistory=False,
        setDV=None,
        xScale=1.0,
        objScale=1.0,
        conScale=1.0,
        offset=0.0,
        check_solution=True,
    ):
        """Solve the scaled HS071 problem with ``optName`` and optionally verify it.

        Parameters
        ----------
        optName : str
            pyOptSparse optimizer name, instantiated via ``OPT``.
        tol : float
            Tolerance for ``assert_allclose`` checks.
        optOptions : dict, optional
            Optimizer options; defaults to an empty dict.
        storeHistory : bool or str, optional
            Passed straight through to the optimizer call.
        setDV : str or dict, optional
            History file path (str) or DV dict used to seed the design variables.
        xScale, objScale, conScale : float, optional
            Scaling factors applied to DVs, objective, and constraints.
        offset : float, optional
            Offset applied to the design variables.
        check_solution : bool, optional
            If True, assert the solution matches the known HS071 optimum.

        Returns
        -------
        Solution
            The pyOptSparse solution object.
        """
        # None sentinel instead of a mutable default dict shared across calls.
        optOptions = {} if optOptions is None else optOptions

        # Optimization Object
        optProb = Optimization("HS071 Constraint Problem", self.objfunc)

        # Design Variables
        x0 = [1.0, 5.0, 5.0, 1.0]
        optProb.addVarGroup("xvars", 4, lower=1, upper=5, value=x0, scale=xScale, offset=offset)

        # Constraints
        optProb.addConGroup("con", 2, lower=[25, 40], upper=[None, 40], scale=conScale)

        # Objective
        optProb.addObj("obj", scale=objScale)

        # Optimizer
        try:
            opt = OPT(optName, options=optOptions)
        except Error:
            raise unittest.SkipTest("Optimizer not available:", optName)

        # Seed the DVs from a history file or an explicit dict.
        if isinstance(setDV, str):
            optProb.setDVsFromHistory(setDV)
        elif isinstance(setDV, dict):
            optProb.setDVs(setDV)
            outDV = optProb.getDVs()
            assert_allclose(setDV["xvars"], outDV["xvars"])

        sol = opt(optProb, sens=self.sens, storeHistory=storeHistory)

        # Check Solution against the known HS071 optimum.
        if check_solution:
            self.fStar = 17.0140172
            self.xStar = (1.0, 4.743, 3.82115, 1.37941)
            self.lambdaStar = (0.55229366, -0.16146857)
            assert_allclose(sol.objectives["obj"].value, self.fStar, atol=tol, rtol=tol)
            assert_allclose(sol.xStar["xvars"], self.xStar, atol=tol, rtol=tol)

            # Multipliers are only reported by some optimizers.
            if hasattr(sol, "lambdaStar"):
                assert_allclose(sol.lambdaStar["con"], self.lambdaStar, atol=tol, rtol=tol)
        return sol
Example no. 18
0
    def optimize(self, optName, tol, optOptions=None):
        """Solve TP109 with ``optName`` using complex-step sensitivities.

        Parameters
        ----------
        optName : str
            pyOptSparse optimizer name, instantiated via ``OPT``.
        tol : float
            Tolerance for ``assert_allclose`` checks.
        optOptions : dict, optional
            Optimizer options; defaults to an empty dict.
        """
        # None sentinel instead of a mutable default dict shared across calls.
        optOptions = {} if optOptions is None else optOptions

        # Optimization Object
        optProb = Optimization("TP109 Constraint Problem", objfunc)

        # Design Variables
        lower = [0.0, 0.0, -0.55, -0.55, 196, 196, 196, -400, -400]
        upper = [None, None, 0.55, 0.55, 252, 252, 252, 800, 800]
        value = [0, 0, 0, 0, 0, 0, 0, 0, 0]
        optProb.addVarGroup("xvars", 9, lower=lower, upper=upper, value=value)

        # Constraints: the last two move into a separate linear group below
        # when USE_LINEAR is set.
        lower = [0, 0, 0, 0, 0, 0, 0, 0]
        upper = [None, None, 0, 0, 0, 0, 0, 0]
        if not USE_LINEAR:
            lower.extend([0, 0])
            upper.extend([None, None])

        optProb.addConGroup("con", len(lower), lower=lower, upper=upper)

        # And the 2 linear constriants
        if USE_LINEAR:
            jac = np.zeros((1, 9))
            jac[0, 3] = 1.0
            jac[0, 2] = -1.0
            optProb.addConGroup("lin_con",
                                1,
                                lower=-0.55,
                                upper=0.55,
                                wrt=["xvars"],
                                jac={"xvars": jac},
                                linear=True)

        # Objective
        optProb.addObj("obj")

        # Optimizer
        try:
            opt = OPT(optName, options=optOptions)
        except Error:
            raise unittest.SkipTest("Optimizer not available:", optName)

        # Solution (sensitivities via complex step)
        sol = opt(optProb, sens="CS")

        # Check Solution
        assert_allclose(sol.objectives["obj"].value,
                        0.536206927538e04,
                        atol=tol,
                        rtol=tol)
Example no. 19
0
    def setup_optProb(self):
        """Build the N-dimensional Rosenbrock problem with a seeded random start."""
        self.optProb = Optimization("Rosenbrock Problem", self.objfunc)

        # Fixed seed so the starting point is reproducible.
        np.random.seed(10)
        start = np.random.normal(size=self.N)

        bound = np.ones(self.N) * 50
        self.optProb.addVarGroup("xvars", self.N, lower=-bound, upper=bound, value=start)

        # Objective
        self.optProb.addObj("obj")
Example no. 20
0
    def setup_optProb(self, xScale=1.0, objScale=1.0, conScale=1.0, offset=0.0):
        """Build the HS071 problem with user-selectable scaling and offset."""
        prob = Optimization("HS071 Constraint Problem", self.objfunc, sens=self.sens)

        # Design variables: four entries bounded in [1, 5].
        start = [1.0, 5.0, 5.0, 1.0]
        prob.addVarGroup("xvars", 4, lower=1, upper=5, value=start, scale=xScale, offset=offset)

        # Constraints: one one-sided, one equality-style pair.
        prob.addConGroup("con", 2, lower=[25, 40], upper=[None, 40], scale=conScale)

        # Objective
        prob.addObj("obj", scale=objScale)
        self.optProb = prob
Example no. 21
0
    def optimize(self, optName, optOptions=None, places=2):
        """Solve TP109 with ``optName`` using complex-step sensitivities.

        Parameters
        ----------
        optName : str
            pyOptSparse optimizer name, instantiated via ``OPT``.
        optOptions : dict, optional
            Optimizer options; defaults to an empty dict.
        places : int, optional
            Decimal places for the ``assertAlmostEqual`` check.
        """
        # None sentinel instead of a mutable default dict shared across calls.
        if optOptions is None:
            optOptions = {}

        # Optimization Object
        optProb = Optimization('TP109 Constraint Problem', objfunc)

        # Design Variables
        lower = [0.0, 0.0, -0.55, -0.55, 196, 196, 196, -400, -400]
        upper = [None, None, 0.55, 0.55, 252, 252, 252, 800, 800]
        value = [0, 0, 0, 0, 0, 0, 0, 0, 0]
        optProb.addVarGroup('xvars', 9, lower=lower, upper=upper, value=value)

        # Constraints
        lower = [0, 0, 0, 0, 0, 0, 0, 0]
        upper = [None, None, 0, 0, 0, 0, 0, 0]
        if not USE_LINEAR:
            lower.extend([0, 0])
            upper.extend([None, None])

        optProb.addConGroup('con', len(lower), lower=lower, upper=upper)

        # And the 2 linear constriants
        if USE_LINEAR:
            jac = numpy.zeros((1, 9))
            jac[0, 3] = 1.0
            jac[0, 2] = -1.0
            optProb.addConGroup('lin_con',
                                1,
                                lower=-.55,
                                upper=0.55,
                                wrt=['xvars'],
                                jac={'xvars': jac},
                                linear=True)

        # Objective
        optProb.addObj('obj')

        # Optimizer (narrowed from a bare except)
        try:
            opt = OPT(optName, options=optOptions)
        except Exception:
            raise unittest.SkipTest('Optimizer not available:', optName)

        # Solution (sensitivities via complex step)
        sol = opt(optProb, sens='CS')

        # Check Solution
        self.assertAlmostEqual(sol.objectives['obj'].value,
                               0.536206927538e+04,
                               places=places)
Example no. 22
0
    def setup_optProb(self, sparse=True):
        """Build the large-and-sparse problem; a dense variant uses a smaller N."""
        self.N = 10000 if sparse else 500

        # Optimization Object
        self.optProb = Optimization("large and sparse",
                                    self.objfunc,
                                    sens=self.sens)

        # Design variables: one scalar, plus two large groups whose bounds
        # vary with the index.
        self.optProb.addVar("x", lower=-100, upper=150, value=0)
        idx = np.arange(self.N)
        self.optProb.addVarGroup("y", self.N, lower=-10 - idx, upper=idx, value=0)
        idx2 = np.arange(2 * self.N)
        self.optProb.addVarGroup("z", 2 * self.N, upper=idx2, lower=-100 - idx2, value=0)

        # Constraints: three scalars, then a large linear group with an
        # explicit (sparse or dense) Jacobian.
        self.optProb.addCon("con1", upper=100, wrt=["x"])
        self.optProb.addCon("con2", upper=100)
        self.optProb.addCon("con3", lower=4, wrt=["x", "z"])
        xJac = np.ones((self.N, 1))
        if sparse:
            yJac = scipy.sparse.spdiags(np.ones(self.N), 0, self.N, self.N)
        else:
            yJac = np.eye(self.N)
        self.optProb.addConGroup(
            "lincon",
            self.N,
            lower=2 - 3 * np.arange(self.N),
            linear=True,
            wrt=["x", "y"],
            jac={"x": xJac, "y": yJac},
        )
        self.optProb.addObj("obj")
Example no. 23
0
    def optimize(self, optName, optOptions=None, storeHistory=False, places=5):
        """Solve the large-and-sparse problem with ``optName`` and check the optimum.

        Parameters
        ----------
        optName : str
            pyOptSparse optimizer name, instantiated via ``OPT``.
        optOptions : dict, optional
            Optimizer options; defaults to an empty dict.
        storeHistory : bool, optional
            Unused here; kept for signature compatibility with sibling tests.
        places : int, optional
            Decimal places for the ``assertAlmostEqual`` checks.
        """
        # None sentinel instead of a mutable default dict shared across calls.
        if optOptions is None:
            optOptions = {}

        # Optimization Object
        optProb = Optimization('large and sparse', objfunc)

        # Design Variables
        optProb.addVar('x', lower=-100, upper=150, value=0)
        optProb.addVarGroup('y',
                            N,
                            lower=-10 - arange(N),
                            upper=arange(N),
                            value=0)
        optProb.addVarGroup('z',
                            2 * N,
                            upper=arange(2 * N),
                            lower=-100 - arange(2 * N),
                            value=0)

        # Constraints
        optProb.addCon('con1', upper=100, wrt=['x'])
        optProb.addCon('con2', upper=100)
        optProb.addCon('con3', lower=4, wrt=['x', 'z'])
        optProb.addConGroup('lincon',
                            N,
                            lower=2 - 3 * arange(N),
                            linear=True,
                            wrt=['x', 'y'],
                            jac={
                                'x': numpy.ones((N, 1)),
                                'y': sparse.spdiags(numpy.ones(N), 0, N, N)
                            })
        optProb.addObj('obj')

        # Optimizer (narrowed from a bare except)
        try:
            opt = OPT(optName, options=optOptions)
        except Exception:
            raise unittest.SkipTest('Optimizer not available:', optName)

        sol = opt(optProb, sens=sens)

        # Check Solution
        self.assertAlmostEqual(sol.objectives['obj'].value,
                               10.0,
                               places=places)

        self.assertAlmostEqual(sol.variables['x'][0].value, 2.0, places=places)
    def test_obj(self):
        """SNOPT terminates when the user signals from the objective callback
        and reports the user-termination exit code (71)."""
        termcomp = TerminateComp(max_obj=2)
        optProb = Optimization("Paraboloid", termcomp.objfunc)

        # Two continuous scalar design variables with identical bounds.
        for dv_name in ("x", "y"):
            optProb.addVarGroup(dv_name,
                                1,
                                varType="c",
                                lower=-50.0,
                                upper=50.0,
                                value=0.0)

        optProb.addObj("obj")

        # Single linear equality constraint with an explicit Jacobian.
        optProb.addConGroup(
            "con", 1, lower=-15.0, upper=-15.0, wrt=["x", "y"], linear=True, jac=con_jac
        )

        test_name = "SNOPT_user_termination_obj"
        optOptions = {
            "Print file": f"{test_name}.out",
            "Summary file": f"{test_name}_summary.out",
        }
        try:
            opt = SNOPT(options=optOptions)
        except Error as e:
            if "There was an error importing" in e.message:
                raise unittest.SkipTest("Optimizer not available: SNOPT")
            raise e

        sol = opt(optProb, sens=termcomp.sens)

        # With max_obj=2, three objective evaluations occur before termination.
        self.assertEqual(termcomp.obj_count, 3)

        # Exit code for user requested termination.
        self.assertEqual(sol.optInform["value"], 71)
Esempio n. 25
0
    def test_opt_bug_print_2con(self):
        """Regression test: printing a problem (and its solution) that carries
        two linear constraint groups of different sizes must not raise."""
        optProb = Optimization("Paraboloid", objfunc_2con)

        # Design Variables: two continuous scalars.
        optProb.addVarGroup("x", 1, varType="c", lower=-50.0, upper=50.0, value=0.0)
        optProb.addVarGroup("y", 1, varType="c", lower=-50.0, upper=50.0, value=0.0)

        # Objective
        optProb.addObj("obj")

        # Dense Jacobians for constraint groups of size 2 and 3.
        con_jac2 = {"x": -np.ones((2, 1)), "y": np.ones((2, 1))}
        con_jac3 = {"x": -np.ones((3, 1)), "y": np.ones((3, 1))}

        # Linear equality constraint groups.
        optProb.addConGroup("con", 2, lower=-15.0, upper=-15.0, wrt=["x", "y"], linear=True, jac=con_jac2)
        optProb.addConGroup("con2", 3, lower=-15.0, upper=-15.0, wrt=["x", "y"], linear=True, jac=con_jac3)

        # Exercise the problem's string representation before solving.
        print(optProb)

        test_name = "bugfix_SNOPT_bug_print_2con"
        optOptions = {
            "Major feasibility tolerance": 1e-1,
            "Print file": f"{test_name}.out",
            "Summary file": f"{test_name}_summary.out",
        }

        # Optimizer
        try:
            opt = SNOPT(options=optOptions)
        except Error as e:
            if "There was an error importing" in e.message:
                raise unittest.SkipTest("Optimizer not available: SNOPT")
            raise e

        sol = opt(optProb, sens=sens)

        # Exercise the solution's string representation as well.
        print(sol)
Esempio n. 26
0
    def test_opt_bug1(self):
        """Regression test: optimizing a model with no constraints used to
        raise a TypeError after a new feature was added."""
        optProb = Optimization('Paraboloid', objfunc_no_con)

        # Design Variables
        optProb.addVarGroup('x', 1, type='c', lower=-50.0, upper=50.0, value=0.0)
        optProb.addVarGroup('y', 1, type='c', lower=-50.0, upper=50.0, value=0.0)
        optProb.finalizeDesignVariables()

        # Objective
        optProb.addObj('obj')

        # Optimizer: skip the test when SNOPT is not installed.  The narrow
        # `except Exception` replaces a bare except that also swallowed
        # KeyboardInterrupt/SystemExit.
        try:
            opt = SNOPT(optOptions={'Major feasibility tolerance': 1e-1})
        except Exception:
            raise unittest.SkipTest('Optimizer not available: SNOPT')

        sol = opt(optProb, sens=sens)
Esempio n. 27
0
    def setup_optProb(self):
        """Build the HS15 constrained problem used by this test case."""
        # Optimization Object
        self.optProb = Optimization("HS15 Constraint Problem", self.objfunc)

        # Two design variables with asymmetric bounds.
        self.optProb.addVarGroup(
            "xvars", 2, lower=[-5.0, -5.0], upper=[0.5, 5.0], value=[-2, 1.0]
        )

        # Two one-sided inequality constraints (no upper bound).
        self.optProb.addConGroup("con", 2, lower=[1.0, 0.0], upper=[None, None])

        # Scalar objective.
        self.optProb.addObj("obj")
Esempio n. 28
0
    def optimize(self, optName, optOptions=None, storeHistory=False):
        """Solve the HS15 problem with the named optimizer and verify the
        known solution.

        Parameters
        ----------
        optName : str
            Name of the pyOptSparse optimizer to use.
        optOptions : dict, optional
            Optimizer-specific options; a fresh dict is used when omitted.
        storeHistory : bool
            When True, record the optimization history to a .hst file.
        """
        # A fresh dict per call avoids the shared-mutable-default pitfall.
        if optOptions is None:
            optOptions = {}

        # Optimization Object
        optProb = Optimization('HS15 Constraint Problem', self.objfunc)

        # Design Variables
        lower = [-5, -5]
        upper = [0.5, 5]
        value = [-2, 1]
        optProb.addVarGroup('xvars', 2, lower=lower, upper=upper, value=value)

        # Constraints (one-sided: no upper bound)
        lower = [1, 0]
        upper = [None, None]
        optProb.addConGroup('con', 2, lower=lower, upper=upper)

        # Objective
        optProb.addObj('obj')

        # Optimizer: skip the test when it is not installed; the narrow
        # `except Exception` replaces a bare except that also swallowed
        # KeyboardInterrupt/SystemExit.
        try:
            opt = OPT(optName, options=optOptions)
        except Exception:
            raise unittest.SkipTest('Optimizer not available:', optName)

        # Solution, optionally recording history to a per-optimizer file.
        if storeHistory:
            histFileName = '%s_hs015_Hist.hst' % (optName.lower())
        else:
            histFileName = None

        sol = opt(optProb, sens=self.sens, storeHistory=histFileName)

        # Check Solution against the known HS15 optimum used by these tests.
        self.assertAlmostEqual(sol.objectives['obj'].value, 306.5)

        self.assertAlmostEqual(sol.variables['xvars'][0].value, 0.5)
        self.assertAlmostEqual(sol.variables['xvars'][1].value, 2.0)
Esempio n. 29
0
    def test_obj(self):
        """SNOPT terminates when the user signals from the objective callback
        and reports the user-termination exit code (71)."""
        termcomp = TerminateComp(max_obj=2)
        optProb = Optimization('Paraboloid', termcomp.objfunc)

        optProb.addVarGroup('x',
                            1,
                            type='c',
                            lower=-50.0,
                            upper=50.0,
                            value=0.0)
        optProb.addVarGroup('y',
                            1,
                            type='c',
                            lower=-50.0,
                            upper=50.0,
                            value=0.0)
        optProb.finalizeDesignVariables()

        optProb.addObj('obj')

        # Single linear equality constraint with an explicit Jacobian.
        optProb.addConGroup('con',
                            1,
                            lower=-15.0,
                            upper=-15.0,
                            wrt=['x', 'y'],
                            linear=True,
                            jac=con_jac)

        # Skip the test when SNOPT is not installed; the narrow
        # `except Exception` replaces a bare except that also swallowed
        # KeyboardInterrupt/SystemExit.
        try:
            opt = SNOPT()
        except Exception:
            raise unittest.SkipTest('Optimizer not available: SNOPT')

        sol = opt(optProb, sens=termcomp.sens)

        # With max_obj=2, three objective evaluations occur before termination.
        self.assertEqual(termcomp.obj_count, 3)

        # Exit code for user requested termination.
        self.assertEqual(sol.optInform['value'][0], 71)
Esempio n. 30
0
    def optimize(self, x0, alg='IPOPT', options={}):
        """Run the lifting-line optimization with pyOptSparse.

        Parameters
        ----------
        x0 : array-like
            Initial design vector; unpacked into named groups via ``get_vars``.
        alg : str
            Optimizer name; only 'IPOPT' is implemented.
        options : dict, optional
            Extra optimizer options, merged into a fresh dict so neither the
            default nor the caller's dict is ever mutated.

        Returns
        -------
        tuple
            ``(x, sol)`` — the optimal design vector and the pyOptSparse
            solution object.

        Raises
        ------
        NotImplementedError
            If ``alg`` is not 'IPOPT'.
        """
        # Bug fix: the original built this merged dict and then discarded it
        # by rebinding `opt`, passing the raw `options` argument instead.
        # Merge into a distinctly named dict and actually use it.
        optOptions = {}
        optOptions.update(options)

        def objfun(xdict):
            # Evaluate objective and both constraint groups for one design.
            x, fail = self.set_vars(xdict)
            funcs = {
                'obj': self.obj(x),
                'llcon': self.lifting_line_const(x),
                'wcon': self.enough_lift_const(x),
            }
            return funcs, fail

        optProb = Optimization('llOpt', objfun)

        # Bounds and initial values, unpacked into per-variable dicts.
        ub = self.get_vars(self.bounds.ub, dic=True)
        lb = self.get_vars(self.bounds.lb, dic=True)
        x0 = self.get_vars(x0, dic=True)
        optProb.addVar('V', upper=ub['V'], lower=lb['V'], value=x0['V'])
        optProb.addVar('b', upper=ub['b'], lower=lb['b'], value=x0['b'])
        optProb.addVarGroup('c', self.N_th, upper=ub['c'], lower=lb['c'], value=x0['c'])
        optProb.addVarGroup('al', self.N_th, upper=ub['al'], lower=lb['al'], value=x0['al'])
        optProb.addVarGroup('A', self.N_A, upper=ub['A'], lower=lb['A'], value=x0['A'])
        optProb.addObj('obj')

        # Equality constraints: lifting-line residuals and the lift requirement.
        optProb.addConGroup('llcon', self.N_th, lower=0., upper=0.)
        optProb.addCon('wcon', lower=0., upper=0.)

        if alg == "IPOPT":
            opt = OPT(alg, options=optOptions)
            sol = opt(optProb, sens='FD')  # finite-difference sensitivities
        else:
            raise NotImplementedError(f"No routine for algorithm {alg}")

        # Repack the optimal variable values into a flat design vector.
        D = dict(
            al=[a.value for a in sol.variables['al']],
            c=[a.value for a in sol.variables['c']],
            A=[a.value for a in sol.variables['A']],
            b=sol.variables['b'][0].value,
            V=sol.variables['V'][0].value,
        )
        x = self.set_vars(D)[0]
        return x, sol