예제 #1
0
def set_pyoptsparse_opt(optname):
    """For testing, sets the pyoptsparse optimizer using the given optimizer
    name.  This may be modified based on the value of
    OPENMDAO_FORCE_PYOPTSPARSE_OPT.  This can be used on systems that have
    SNOPT installed to force them to use SLSQP in order to mimic our test
    machines on travis and appveyor.

    Returns a tuple ``(OPT, OPTIMIZER)`` — the pyoptsparse ``OPT`` factory
    and the resolved optimizer name — or ``(None, None)`` when pyoptsparse
    (or any usable optimizer) is unavailable.
    """

    OPT = None
    OPTIMIZER = None
    # Environment variable lets CI force a specific optimizer.
    force = os.environ.get('OPENMDAO_FORCE_PYOPTSPARSE_OPT')
    if force:
        optname = force

    try:
        from pyoptsparse import OPT
        try:
            OPT(optname)
            OPTIMIZER = optname
        except Exception:  # narrowed from bare except: keep KeyboardInterrupt/SystemExit
            if optname != 'SLSQP':
                # Fall back to SLSQP, which is bundled with pyoptsparse.
                try:
                    OPT('SLSQP')
                    OPTIMIZER = 'SLSQP'
                except Exception:
                    pass
    except Exception:
        # pyoptsparse not installed at all; return (None, None).
        pass

    return OPT, OPTIMIZER
예제 #2
0
    def test_autorefine(self):
        """Solve TP109 globally with ALPSO, then refine the result with SNOPT
        and verify the refined objective value."""
        # Problem definition
        optProb = Optimization("TP109 Constraint Problem", objfunc)

        # Design variables (infinite bounds removed so ALPSO can run)
        optProb.addVarGroup(
            "xvars",
            9,
            lower=[0.0, 0.0, -0.55, -0.55, 196, 196, 196, -400, -400],
            upper=[2000, 2000, 0.55, 0.55, 252, 252, 252, 800, 800],
            value=[0, 0, 0, 0, 0, 0, 0, 0, 0],
        )

        # Nonlinear constraints; two extra appear when the linear pair is
        # folded into the nonlinear group.
        con_lower = [0, 0, 0, 0, 0, 0, 0, 0]
        con_upper = [None, None, 0, 0, 0, 0, 0, 0]
        if not USE_LINEAR:
            con_lower += [0, 0]
            con_upper += [None, None]
        optProb.addConGroup("con", len(con_lower), lower=con_lower, upper=con_upper)

        # The linear constraint pair, expressed with an explicit jacobian
        if USE_LINEAR:
            lin_jac = np.zeros((1, 9))
            lin_jac[0, 2] = -1.0
            lin_jac[0, 3] = 1.0
            optProb.addConGroup("lin_con",
                                1,
                                lower=-0.55,
                                upper=0.55,
                                wrt=["xvars"],
                                jac={"xvars": lin_jac},
                                linear=True)

        # Objective
        optProb.addObj("obj")

        # Stage 1: global search with ALPSO
        try:
            global_opt = OPT("ALPSO")
        except Error:
            raise unittest.SkipTest("Optimizer not available:", "ALPSO")
        sol1 = global_opt(optProb)

        # Stage 2: gradient-based refinement starting from the ALPSO solution
        try:
            refine_opt = OPT("SNOPT")
        except Error:
            raise unittest.SkipTest("Optimizer not available:", "SNOPT")
        sol2 = refine_opt(sol1)

        # Verify the refined objective against the known optimum
        assert_allclose(sol2.objectives["obj"].value,
                        0.536206927538e04,
                        atol=1e-2,
                        rtol=1e-2)
예제 #3
0
def set_pyoptsparse_opt(optname, fallback=True):
    """
    For testing, sets the pyoptsparse optimizer using the given optimizer name.

    This may be modified based on the value of
    OPENMDAO_FORCE_PYOPTSPARSE_OPT. This can be used on systems that have
    SNOPT installed to force them to use SLSQP in order to mimic our test
    machines on travis and appveyor.

    Parameters
    ----------
    optname : str
        Name of pyoptsparse optimizer that is requested by the test.
    fallback : bool
        If True, fall back to SLSQP if optname can't be found

    Returns
    -------
    object
        Pyoptsparse optimizer instance.
    str
        Pyoptsparse optimizer string
    """
    OPT = None
    opt = None
    OPTIMIZER = None

    # A CI environment variable may override the requested optimizer.
    force = os.environ.get('OPENMDAO_FORCE_PYOPTSPARSE_OPT')
    if force:
        optname = force

    try:
        from pyoptsparse import OPT

        try:
            opt = OPT(optname)
        except Exception:
            # Requested optimizer unavailable; optionally try SLSQP instead.
            if fallback and optname != 'SLSQP':
                try:
                    opt = OPT('SLSQP')
                    OPTIMIZER = 'SLSQP'
                except Exception:
                    pass
        else:
            OPTIMIZER = optname
            # A Mock instance means pyoptsparse stubbed this optimizer out.
            if fallback and isinstance(opt, Mock):
                try:
                    opt = OPT('SLSQP')
                    OPTIMIZER = 'SLSQP'
                except Exception:
                    pass
    except Exception:
        # pyoptsparse itself is not importable.
        pass

    # Even the fallback may be a stub; treat that as "nothing available".
    if isinstance(opt, Mock):
        OPT = OPTIMIZER = None

    if not fallback and OPTIMIZER != optname:
        raise unittest.SkipTest("pyoptsparse is not providing %s" % optname)

    return OPT, OPTIMIZER
예제 #4
0
    def test_autorefine(self):
        """Optimize TP109 globally with ALPSO, then refine that solution with
        SNOPT and check the refined objective value."""
        # Optimization Object
        optProb = Optimization('TP109 Constraint Problem', objfunc)

        # Design Variables (Removed infinite bounds for ALPSO)
        lower = [0.0, 0.0, -0.55, -0.55, 196, 196, 196, -400, -400]
        upper = [2000, 2000, 0.55, 0.55, 252, 252, 252, 800, 800]
        value = [0, 0, 0, 0, 0, 0, 0, 0, 0]
        optProb.addVarGroup('xvars', 9, lower=lower, upper=upper, value=value)

        # Constraints
        lower = [0, 0, 0, 0, 0, 0, 0, 0]
        upper = [None, None, 0, 0, 0, 0, 0, 0]
        if not USE_LINEAR:
            lower.extend([0, 0])
            upper.extend([None, None])

        optProb.addConGroup('con', len(lower), lower=lower, upper=upper)

        # And the 2 linear constraints
        if USE_LINEAR:
            jac = numpy.zeros((1, 9))
            jac[0, 3] = 1.0
            jac[0, 2] = -1.0
            optProb.addConGroup('lin_con',
                                1,
                                lower=-.55,
                                upper=0.55,
                                wrt=['xvars'],
                                jac={'xvars': jac},
                                linear=True)

        # Objective
        optProb.addObj('obj')

        # Global Optimizer: ALPSO
        try:
            opt1 = OPT('ALPSO')
        except Exception:  # narrowed from bare except; keep KeyboardInterrupt alive
            raise unittest.SkipTest('Optimizer not available:', 'ALPSO')

        # Get first Solution
        sol1 = opt1(optProb)

        # Now run the previous solution with SNOPT
        try:
            opt2 = OPT('SNOPT')
        except Exception:
            raise unittest.SkipTest('Optimizer not available:', 'SNOPT')

        sol2 = opt2(sol1)

        # Check Solution
        self.assertAlmostEqual(sol2.objectives['obj'].value,
                               0.536206927538e+04,
                               places=2)
예제 #5
0
    def __call__(self, optimizer, options=None):
        """Run the optimization with the named pyOptSparse optimizer.

        Builds a pyOptSparse problem from this driver's design variables and
        functions, then solves it, storing the history in 'hist.hst'.
        Functions with no bounds become objectives; bounded ones become
        constraint groups (with an explicit jacobian when ``get_jacs`` is set).
        """
        system = self._system
        variables = self._variables

        opt_prob = OptProblem('Optimization', self.obj_func)

        # One variable group per design variable, sized from the 'u' vector.
        for dv_name in variables['dv'].keys():
            dv = variables['dv'][dv_name]
            dv_id = dv['ID']
            value = dv['value']
            lower = dv['lower']
            upper = dv['upper']
            size = system.vec['u'](dv_id).shape[0]
            opt_prob.addVarGroup(dv_name, size, value=value,
                                 lower=lower, upper=upper)
        opt_prob.finalizeDesignVariables()

        for func_name in variables['func'].keys():
            func = variables['func'][func_name]
            func_id = func['ID']
            lower = func['lower']
            upper = func['upper']
            linear = func['linear']
            get_jacs = func['get_jacs']
            size = system.vec['u'](func_id).shape[0]
            if lower is None and upper is None:
                # Unbounded function -> objective
                opt_prob.addObj(func_name)
            else:
                if get_jacs is None:
                    opt_prob.addConGroup(func_name, size,
                                         lower=lower, upper=upper)
                else:
                    # Constraint with a user-supplied (possibly linear) jacobian
                    jacs_var = get_jacs()

                    dv_names = []
                    jacs = {}
                    for dv_var in jacs_var:
                        dv_id = self._system.get_id(dv_var)
                        dv_name = self._get_name(dv_id)
                        dv_names.append(dv_name)
                        jacs[dv_name] = jacs_var[dv_var]

                    opt_prob.addConGroup(func_name, size,
                                         wrt=dv_names,
                                         jac=jacs, linear=linear,
                                         lower=lower, upper=upper)

        if options is None:
            options = {}

        opt = Optimizer(optimizer, options=options)
        opt.setOption('Iterations limit', int(1e6))
        sol = opt(opt_prob, sens=self.sens_func, storeHistory='hist.hst')
        # Fixed: original used Python 2 `print sol`, a SyntaxError on Python 3.
        print(sol)
예제 #6
0
    def decorator(obj):
        """Mark *obj* (a test function or class) to be skipped when
        pyoptsparse, or the requested optimizer, is unavailable."""
        import unittest

        def _mark_skip(target, msg):
            # Plain functions get wrapped so calling them raises SkipTest;
            # classes are skipped via the unittest skip attributes alone.
            if not isinstance(target, type):
                @functools.wraps(target)
                def skip_wrapper(*args, **kwargs):
                    raise unittest.SkipTest(msg)
                target = skip_wrapper
            target.__unittest_skip__ = True
            target.__unittest_skip_why__ = msg
            return target

        try:
            from pyoptsparse import OPT
        except Exception:
            return _mark_skip(obj, "pyoptsparse is not installed.")

        try:
            OPT(optimizer)
        except Exception:
            obj = _mark_skip(obj, "pyoptsparse is not providing %s" % optimizer)

        return obj
예제 #7
0
    def optimize(self, optName, optOptions=None, storeHistory=False, places=5):
        """Solve the HS071 problem with *optName* and check the known optimum.

        Skips the test when the requested optimizer is unavailable.
        """
        # Avoid the shared mutable-default-argument pitfall.
        if optOptions is None:
            optOptions = {}

        # Optimization Object
        optProb = Optimization('HS071 Constraint Problem', objfunc)

        # Design Variables
        x0 = [1.0, 5.0, 5.0, 1.0]
        optProb.addVarGroup('xvars', 4, lower=1, upper=5, value=x0)

        # Constraints
        optProb.addConGroup('con', 2, lower=[25, 40], upper=[None, 40])

        # Objective
        optProb.addObj('obj')

        # Optimizer
        try:
            opt = OPT(optName, options=optOptions)
        except Exception:  # narrowed from a bare except
            raise unittest.SkipTest('Optimizer not available:', optName)

        sol = opt(optProb, sens=sens)

        # Check Solution
        self.assertAlmostEqual(sol.objectives['obj'].value, 17.0140172, places=places)

        self.assertAlmostEqual(sol.variables['xvars'][0].value, 1.0, places=places)
        self.assertAlmostEqual(sol.variables['xvars'][1].value, 4.743, places=places)
        self.assertAlmostEqual(sol.variables['xvars'][2].value, 3.82115, places=places)
        self.assertAlmostEqual(sol.variables['xvars'][3].value, 1.37941, places=places)

        if hasattr(sol, 'lambdaStar'):
            self.assertAlmostEqual(sol.lambdaStar['con'][0], 0.55229366, places=places)
            self.assertAlmostEqual(sol.lambdaStar['con'][1], -0.16146857, places=places)
예제 #8
0
    def optimize(self, optName, tol, optOptions=None, storeHistory=False, hotStart=None):
        """Solve HS15 with *optName*; verify the DVs and either local optimum.

        ``storeHistory`` may be False/None (no history), True (default file
        name) or a str path to a history file.
        """
        if optOptions is None:
            optOptions = {}  # fresh dict per call; avoids mutable default
        self.nf = 0  # number of function evaluations
        self.ng = 0  # number of gradient evaluations
        # Optimization Object
        optProb = Optimization("HS15 Constraint Problem", self.objfunc)

        # Design Variables
        lower = [-5.0, -5.0]
        upper = [0.5, 5.0]
        value = [-2, 1.0]
        optProb.addVarGroup("xvars", 2, lower=lower, upper=upper, value=value)

        # Constraints
        lower = [1.0, 0.0]
        upper = [None, None]
        optProb.addConGroup("con", 2, lower=lower, upper=upper)

        # Objective
        optProb.addObj("obj")

        # Check optimization problem:
        print(optProb)

        # Optimizer
        try:
            opt = OPT(optName, options=optOptions)
        except Error:
            raise unittest.SkipTest("Optimizer not available:", optName)

        # Solution history: True -> default name, str -> given path,
        # anything else (False/None) -> no history.  The original left
        # self.histFileName unassigned for the default storeHistory=False
        # (the `is not None` test matched False but neither inner branch
        # fired), causing an AttributeError below.
        if storeHistory is True:
            self.histFileName = "%s_hs015_Hist.hst" % (optName.lower())
        elif isinstance(storeHistory, str):
            self.histFileName = storeHistory
        else:
            self.histFileName = None

        sol = opt(optProb, sens=self.sens, storeHistory=self.histFileName, hotStart=hotStart)

        # Test printing solution to screen
        print(sol)

        # Check Solution
        self.fStar1 = 306.5
        self.fStar2 = 360.379767

        self.xStar1 = (0.5, 2.0)
        self.xStar2 = (-0.79212322, -1.26242985)

        dv = sol.getDVs()
        sol_xvars = [sol.variables["xvars"][i].value for i in range(2)]
        assert_allclose(sol_xvars, dv["xvars"], atol=tol, rtol=tol)
        # we check either optimum via try/except
        try:
            assert_allclose(sol.objectives["obj"].value, self.fStar1, atol=tol, rtol=tol)
            assert_allclose(dv["xvars"], self.xStar1, atol=tol, rtol=tol)
        except AssertionError:
            assert_allclose(sol.objectives["obj"].value, self.fStar2, atol=tol, rtol=tol)
            assert_allclose(dv["xvars"], self.xStar2, atol=tol, rtol=tol)
예제 #9
0
    def test_dynamic_simul_coloring_pyoptsparse_slsqp(self):
        """Dynamic simultaneous-derivative coloring must not change the
        optimum and should reduce linear solves per iteration (11 vs 22)."""
        try:
            from pyoptsparse import OPT
        except ImportError:
            raise unittest.SkipTest("This test requires pyoptsparse.")

        try:
            OPT('SLSQP')
        except Exception:  # narrowed from a bare except
            raise unittest.SkipTest("This test requires pyoptsparse SLSQP.")

        p_color = run_opt(pyOptSparseDriver, 'rev', optimizer='SLSQP', print_results=False,
                          dynamic_simul_derivs=True)
        assert_almost_equal(p_color['circle.area'], np.pi, decimal=7)

        # run w/o coloring
        p = run_opt(pyOptSparseDriver, 'rev', optimizer='SLSQP', print_results=False)
        assert_almost_equal(p['circle.area'], np.pi, decimal=7)

        # - coloring saves 11 solves per driver iter  (11 vs 22)
        # - initial solve for linear constraints takes 1 in both cases (only done once)
        # - dynamic case does 3 full compute_totals to compute coloring, which adds 22 * 3 solves
        # - (total_solves - N) / (solves_per_iter) should be equal between the two cases,
        # - where N is 1 for the uncolored case and 22 * 3 + 1 for the dynamic colored case.
        self.assertEqual((p.model.linear_solver._solve_count - 1) / 22,
                         (p_color.model.linear_solver._solve_count - 1 - 22 * 3) / 11)
예제 #10
0
    def test_sparsity_pyoptsparse_slsqp(self):
        """SLSQP should reach the same optimum without sparsity, with dynamic
        sparsity, and with a user-provided sparsity pattern."""
        try:
            from pyoptsparse import OPT
        except ImportError:
            raise unittest.SkipTest("This test requires pyoptsparse.")

        try:
            OPT('SLSQP')
        except Exception:  # narrowed from a bare except
            raise unittest.SkipTest("This test requires pyoptsparse SLSQP.")

        # first, run without sparsity
        p = run_opt(pyOptSparseDriver, 'fwd', optimizer='SLSQP', print_results=False)

        # run with dynamic sparsity
        p_dynamic = run_opt(pyOptSparseDriver, 'fwd', dynamic_derivs_sparsity=True,
                            optimizer='SLSQP', print_results=False)

        # run with provided sparsity
        p_sparsity = run_opt(pyOptSparseDriver, 'fwd', sparsity=self.sparsity,
                             optimizer='SLSQP', print_results=False)

        assert_almost_equal(p['circle.area'], np.pi, decimal=7)
        assert_almost_equal(p_dynamic['circle.area'], np.pi, decimal=7)
        assert_almost_equal(p_sparsity['circle.area'], np.pi, decimal=7)
def large_sparse(optimizer="SNOPT", optOptions=None):
    """Build the 'large and sparse' optimization problem and an optimizer.

    Returns the (opt, optProb) pair; also prints the sparsity pattern.
    """
    options = {} if optOptions is None else optOptions

    # Problem definition
    optProb = Optimization("large and sparse", objfunc)

    # Design variables: one scalar plus groups of size N and 2N
    optProb.addVar("x", lower=-100, upper=150, value=0)
    optProb.addVarGroup("y", N, lower=-10 - arange(N), upper=arange(N), value=0)
    optProb.addVarGroup("z", 2 * N, lower=-100 - arange(2 * N), upper=arange(2 * N), value=0)

    # Nonlinear constraints
    optProb.addCon("con1", upper=100, wrt=["x"])
    optProb.addCon("con2", upper=100)
    optProb.addCon("con3", lower=4, wrt=["x", "z"])

    # Linear constraint group with an explicit sparse jacobian
    lin_jac = {"x": np.ones((N, 1)), "y": sparse.spdiags(np.ones(N), 0, N, N)}
    optProb.addConGroup(
        "lincon",
        N,
        lower=2 - 3 * arange(N),
        linear=True,
        wrt=["x", "y"],
        jac=lin_jac,
    )

    optProb.addObj("obj")

    # Instantiate the optimizer and show the problem's sparsity layout
    opt = OPT(optimizer, options=options)
    optProb.printSparsity()

    return opt, optProb
예제 #12
0
    def optimize(self, optName, tol, optOptions=None, storeHistory=False):
        """Solve the 'large and sparse' problem with *optName* and check the
        known optimum (obj=10, x=2)."""
        # Avoid the shared mutable-default-argument pitfall.
        if optOptions is None:
            optOptions = {}

        # Optimization Object
        optProb = Optimization("large and sparse", objfunc)

        # Design Variables
        optProb.addVar("x", lower=-100, upper=150, value=0)
        optProb.addVarGroup("y", N, lower=-10 - arange(N), upper=arange(N), value=0)
        optProb.addVarGroup("z", 2 * N, upper=arange(2 * N), lower=-100 - arange(2 * N), value=0)

        # Constraints
        optProb.addCon("con1", upper=100, wrt=["x"])
        optProb.addCon("con2", upper=100)
        optProb.addCon("con3", lower=4, wrt=["x", "z"])
        optProb.addConGroup(
            "lincon",
            N,
            lower=2 - 3 * arange(N),
            linear=True,
            wrt=["x", "y"],
            jac={"x": np.ones((N, 1)), "y": sparse.spdiags(np.ones(N), 0, N, N)},
        )
        optProb.addObj("obj")

        # Optimizer
        try:
            opt = OPT(optName, options=optOptions)
        except Error:
            raise unittest.SkipTest("Optimizer not available:", optName)

        sol = opt(optProb, sens=sens)

        # Check Solution
        assert_allclose(sol.objectives["obj"].value, 10.0, atol=tol, rtol=tol)

        assert_allclose(sol.variables["x"][0].value, 2.0, atol=tol, rtol=tol)
예제 #13
0
    def setup_optProb(self, nObj=1, nDV=None, nCon=None, xScale=None, objScale=None, conScale=None, offset=None):
        """
        This function sets up a general optimization problem, with arbitrary
        DVs, constraints and objectives.
        Arbitrary scaling for the various parameters can also be specified.
        """
        # Replace mutable list defaults (shared across calls) with
        # per-call defaults; behavior is unchanged for all callers.
        nDV = [4] if nDV is None else nDV
        nCon = [2] if nCon is None else nCon
        xScale = [1.0] if xScale is None else xScale
        objScale = [1.0] if objScale is None else objScale
        conScale = [1.0] if conScale is None else conScale
        offset = [0.0] if offset is None else offset

        self.nObj = nObj
        self.nDV = nDV
        self.nCon = nCon
        self.xScale = xScale
        self.objScale = objScale
        self.conScale = conScale
        self.offset = offset

        # Optimization Object
        self.optProb = Optimization("Configurable Test Problem", self.objfunc)
        self.x0 = {}
        # Design Variables (random bounds/values per group)
        for iDV in range(len(nDV)):
            n = nDV[iDV]
            lower = np.random.uniform(-5, 2, n)
            upper = np.random.uniform(5, 20, n)
            x0 = np.random.uniform(lower, upper)
            dvName = "x{}".format(iDV)
            self.x0[dvName] = x0
            self.optProb.addVarGroup(
                dvName,
                n,
                lower=lower,
                upper=upper,
                value=x0,
                scale=xScale[iDV],
                offset=offset[iDV],
            )

        # Constraints
        for iCon in range(len(nCon)):
            nc = nCon[iCon]
            lower = np.random.uniform(-5, 2, nc)
            upper = np.random.uniform(5, 6, nc)
            self.optProb.addConGroup(
                "con_{}".format(iCon),
                nc,
                lower=lower,
                upper=upper,
                scale=conScale[iCon],
            )

        # Objective
        for iObj in range(nObj):
            self.optProb.addObj("obj_{}".format(iObj), scale=objScale[iObj])

        # Finalize
        self.optProb.printSparsity()
        # run optimization
        # we don't care about outputs, but this performs optimizer-specific re-ordering
        # of constraints so we need this to test mappings
        opt = OPT("slsqp", options={"IFILE": "optProb_SLSQP.out"})
        opt(self.optProb, "FD")
예제 #14
0
    def optimize(self, optName, optOptions=None, storeHistory=False, places=5):
        """Solve HS15 with *optName* and check DVs against either of the two
        known local optima."""
        # Avoid the shared mutable-default-argument pitfall.
        if optOptions is None:
            optOptions = {}

        # Optimization Object
        optProb = Optimization('HS15 Constraint Problem', self.objfunc)

        # Design Variables
        lower = [-5.0, -5.0]
        upper = [0.5, 5.0]
        value = [-2, 1.0]
        optProb.addVarGroup('xvars', 2, lower=lower, upper=upper, value=value)

        # Constraints
        lower = [1.0, 0.0]
        upper = [None, None]
        optProb.addConGroup('con', 2, lower=lower, upper=upper)

        # Objective
        optProb.addObj('obj')

        # Optimizer
        try:
            opt = OPT(optName, options=optOptions)
        except Exception:  # narrowed from a bare except
            raise unittest.SkipTest('Optimizer not available:', optName)

        # Solution
        if storeHistory:
            histFileName = '%s_hs015_Hist.hst' % (optName.lower())
        else:
            histFileName = None

        sol = opt(optProb, sens=self.sens, storeHistory=histFileName)

        # Test printing solution to screen
        print(sol)

        # Check Solution: objective may land on either local optimum
        fobj = sol.objectives['obj'].value
        diff = np.min(np.abs([fobj - 306.5, fobj - 360.379767]))
        self.assertAlmostEqual(diff, 0.0, places=places)

        xstar1 = (0.5, 2.0)
        xstar2 = (-0.79212322, -1.26242985)
        x1 = sol.variables['xvars'][0].value
        x2 = sol.variables['xvars'][1].value

        # getDVs() must agree with the variables stored on the solution
        dv = sol.getDVs()
        self.assertAlmostEqual(x1, dv['xvars'][0], places=10)
        self.assertAlmostEqual(x2, dv['xvars'][1], places=10)

        diff = np.min(np.abs([xstar1[0] - x1, xstar2[0] - x1]))
        self.assertAlmostEqual(diff, 0.0, places=places)

        diff = np.min(np.abs([xstar1[1] - x2, xstar2[1] - x2]))
        self.assertAlmostEqual(diff, 0.0, places=places)
예제 #15
0
    def optimize(self, optName, tol, optOptions=None, storeHistory=False, hotStart=None):
        """Solve the 4-D Rosenbrock problem with *optName* and verify the
        optimum (f*=0 at x*=1).

        ``storeHistory`` may be False/None (no history), True (default file
        name) or a str path.
        """
        if optOptions is None:
            optOptions = {}  # fresh dict per call; avoids mutable default
        self.nf = 0  # number of function evaluations
        self.ng = 0  # number of gradient evaluations
        # Optimization Object

        optProb = Optimization("Rosenbrock Problem", self.objfunc)

        n = 4  # Number of design variables
        np.random.seed(10)
        value = np.random.normal(size=n)

        lower = np.ones(n) * -50
        upper = np.ones(n) * 50
        optProb.addVarGroup("xvars", n, lower=lower, upper=upper, value=value)

        # Objective
        optProb.addObj("obj")

        # Check optimization problem:
        print(optProb)

        # Optimizer
        try:
            opt = OPT(optName, options=optOptions)
        except Error:
            raise unittest.SkipTest("Optimizer not available:", optName)

        # Solution history: True -> default name, str -> given path,
        # anything else (False/None) -> no history.  The original never set
        # self.histFileName for the default storeHistory=False (the
        # `is not None` check matched False but neither inner branch fired),
        # causing an AttributeError below.
        if storeHistory is True:
            self.histFileName = "%s_Rsbrk_Hist.hst" % (optName.lower())
        elif isinstance(storeHistory, str):
            self.histFileName = storeHistory
        else:
            self.histFileName = None

        sol = opt(optProb, sens=self.sens, storeHistory=self.histFileName, hotStart=hotStart)

        # Test printing solution to screen
        print(sol)

        # Check Solution
        self.fStar1 = 0.0

        self.xStar1 = np.ones(n)

        dv = sol.getDVs()
        sol_xvars = [sol.variables["xvars"][i].value for i in range(n)]

        assert_allclose(sol_xvars, dv["xvars"], atol=tol, rtol=tol)
        assert_allclose(dv["xvars"], self.xStar1, atol=tol, rtol=tol)
        # SNOPT 7.7.7 reports the objective differently; read it accordingly
        if optName == "SNOPT" and opt.version != "7.7.7":
            assert_allclose(sol.objectives["obj"].value, self.fStar1, atol=tol, rtol=tol)
        else:
            assert_allclose(sol.fStar, self.fStar1, atol=tol, rtol=tol)
예제 #16
0
def pyopt_truss(truss, optimizer='snopt', options=None):
    '''
    Take the given problem and optimize it with the given optimizer
    from the pyOptSparse library of optimizers.

    Returns (opt, prob, sol); opt and sol are None when optimization fails.
    '''
    # Avoid the shared mutable-default-argument pitfall.
    if options is None:
        options = {}

    # Import the optimization problem
    from pyoptsparse import Optimization, OPT

    class pyOptWrapper:
        """Adapts the truss object's eval methods to pyOptSparse callbacks."""

        def __init__(self, truss):
            self.truss = truss

        def objcon(self, x):
            # Objective/constraint evaluation
            fail, obj, con = self.truss.evalObjCon(x['x'])
            funcs = {'objective': obj, 'con': con}
            return funcs, fail

        def gobjcon(self, x, funcs):
            # Gradient evaluation: g is the objective gradient, A the
            # constraint jacobian row.
            g = np.zeros(x['x'].shape)
            A = np.zeros((1, x['x'].shape[0]))
            fail = self.truss.evalObjConGradient(x['x'], g, A)
            sens = {'objective': {'x': g}, 'con': {'x': A}}
            return sens, fail

    # Set the design variables
    wrap = pyOptWrapper(truss)
    prob = Optimization('Truss', wrap.objcon)

    # Determine the initial variable values and their lower/upper
    # bounds in the design problem
    n = len(truss.conn)
    x0 = np.zeros(n)
    lower = np.zeros(n)
    upper = np.zeros(n)
    truss.getVarsAndBounds(x0, lower, upper)

    # Set the variable bounds and initial values
    prob.addVarGroup('x', n, value=x0, lower=lower, upper=upper)

    # Set the constraints
    prob.addConGroup('con', 1, lower=0.0, upper=0.0)

    # Add the objective
    prob.addObj('objective')

    # Optimize the problem; failures are deliberately tolerated and
    # reported as (None, ..., None).
    try:
        opt = OPT(optimizer, options=options)
        sol = opt(prob, sens=wrap.gobjcon)
    except Exception:  # narrowed from a bare except
        opt = None
        sol = None

    return opt, prob, sol
예제 #17
0
    def test_sparsity_pyoptsparse_slsqp(self):
        """SLSQP with an explicit (row-idx, col-idx, shape) sparsity spec must
        match the unsparsified run's optimum."""
        try:
            from pyoptsparse import OPT
        except ImportError:
            raise unittest.SkipTest("This test requires pyoptsparse.")

        try:
            OPT('SLSQP')
        except Exception:  # narrowed from a bare except
            raise unittest.SkipTest("This test requires pyoptsparse SLSQP.")

        # Each entry is [row indices, col indices, [nrows, ncols]].
        sparsity = {
            "circle.area": {
                "indeps.x": [[], [], [1, 10]],
                "indeps.y": [[], [], [1, 10]],
                "indeps.r": [[0], [0], [1, 1]]
            },
            "r_con.g": {
                "indeps.x": [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
                             [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [10, 10]],
                "indeps.y": [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
                             [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [10, 10]],
                "indeps.r": [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
                             [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [10, 1]]
            },
            "theta_con.g": {
                "indeps.x": [[0, 1, 2, 3, 4], [0, 2, 4, 6, 8], [5, 10]],
                "indeps.y": [[0, 1, 2, 3, 4], [0, 2, 4, 6, 8], [5, 10]],
                "indeps.r": [[], [], [5, 1]]
            },
            "delta_theta_con.g": {
                "indeps.x": [[0, 0, 1, 1, 2, 2, 3, 3, 4, 4],
                             [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [5, 10]],
                "indeps.y": [[0, 0, 1, 1, 2, 2, 3, 3, 4, 4],
                             [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [5, 10]],
                "indeps.r": [[], [], [5, 1]]
            },
            "l_conx.g": {
                "indeps.x": [[0], [0], [1, 10]],
                "indeps.y": [[], [], [1, 10]],
                "indeps.r": [[], [], [1, 1]]
            }
        }

        p_sparsity = run_opt(pyOptSparseDriver,
                             sparsity=sparsity,
                             optimizer='SLSQP',
                             print_results=False)
        assert_almost_equal(p_sparsity['circle.area'], np.pi, decimal=7)

        # run w/o coloring
        p = run_opt(pyOptSparseDriver, optimizer='SLSQP', print_results=False)
        assert_almost_equal(p['circle.area'], np.pi, decimal=7)
예제 #18
0
    def optimize(
        self,
        optName,
        tol,
        optOptions=None,
        storeHistory=False,
        setDV=None,
        xScale=1.0,
        objScale=1.0,
        conScale=1.0,
        offset=0.0,
        check_solution=True,
    ):
        """Solve the (optionally scaled/offset) HS071 problem with *optName*.

        ``setDV`` may be a history-file path or a dict of DV values used to
        seed the problem.  When ``check_solution`` is True the known optimum
        is verified.  Returns the pyOptSparse solution object.
        """
        # Avoid the shared mutable-default-argument pitfall.
        if optOptions is None:
            optOptions = {}

        # Optimization Object
        optProb = Optimization("HS071 Constraint Problem", self.objfunc)

        # Design Variables
        x0 = [1.0, 5.0, 5.0, 1.0]
        optProb.addVarGroup("xvars", 4, lower=1, upper=5, value=x0, scale=xScale, offset=offset)

        # Constraints
        optProb.addConGroup("con", 2, lower=[25, 40], upper=[None, 40], scale=conScale)

        # Objective
        optProb.addObj("obj", scale=objScale)

        # Optimizer
        try:
            opt = OPT(optName, options=optOptions)
        except Error:
            raise unittest.SkipTest("Optimizer not available:", optName)

        # Optionally seed the DVs from a history file or an explicit dict.
        if isinstance(setDV, str):
            optProb.setDVsFromHistory(setDV)
        elif isinstance(setDV, dict):
            optProb.setDVs(setDV)
            outDV = optProb.getDVs()
            assert_allclose(setDV["xvars"], outDV["xvars"])

        sol = opt(optProb, sens=self.sens, storeHistory=storeHistory)

        # Check Solution
        if check_solution:
            self.fStar = 17.0140172
            self.xStar = (1.0, 4.743, 3.82115, 1.37941)
            self.lambdaStar = (0.55229366, -0.16146857)
            assert_allclose(sol.objectives["obj"].value, self.fStar, atol=tol, rtol=tol)
            assert_allclose(sol.xStar["xvars"], self.xStar, atol=tol, rtol=tol)

            # Not every optimizer reports Lagrange multipliers.
            if hasattr(sol, "lambdaStar"):
                assert_allclose(sol.lambdaStar["con"], self.lambdaStar, atol=tol, rtol=tol)
        return sol
예제 #19
0
    def optimize(self, optName, tol, optOptions=None):
        """Solve the TP109 problem with *optName* (complex-step sensitivities)
        and check the known optimum."""
        # Avoid the shared mutable-default-argument pitfall.
        if optOptions is None:
            optOptions = {}

        # Optimization Object
        optProb = Optimization("TP109 Constraint Problem", objfunc)

        # Design Variables
        lower = [0.0, 0.0, -0.55, -0.55, 196, 196, 196, -400, -400]
        upper = [None, None, 0.55, 0.55, 252, 252, 252, 800, 800]
        value = [0, 0, 0, 0, 0, 0, 0, 0, 0]
        optProb.addVarGroup("xvars", 9, lower=lower, upper=upper, value=value)

        # Constraints
        lower = [0, 0, 0, 0, 0, 0, 0, 0]
        upper = [None, None, 0, 0, 0, 0, 0, 0]
        if not USE_LINEAR:
            lower.extend([0, 0])
            upper.extend([None, None])

        optProb.addConGroup("con", len(lower), lower=lower, upper=upper)

        # And the 2 linear constraints
        if USE_LINEAR:
            jac = np.zeros((1, 9))
            jac[0, 3] = 1.0
            jac[0, 2] = -1.0
            optProb.addConGroup("lin_con",
                                1,
                                lower=-0.55,
                                upper=0.55,
                                wrt=["xvars"],
                                jac={"xvars": jac},
                                linear=True)

        # Objective
        optProb.addObj("obj")

        # Optimizer
        try:
            opt = OPT(optName, options=optOptions)
        except Error:
            raise unittest.SkipTest("Optimizer not available:", optName)

        # Solution (complex-step sensitivities)
        sol = opt(optProb, sens="CS")

        # Check Solution
        assert_allclose(sol.objectives["obj"].value,
                        0.536206927538e04,
                        atol=tol,
                        rtol=tol)
예제 #20
0
    def optimize(self, optName, optOptions=None, places=2):
        """Solve the TP109 problem with *optName* (complex-step sensitivities)
        and check the known optimum."""
        # Avoid the shared mutable-default-argument pitfall.
        if optOptions is None:
            optOptions = {}

        # Optimization Object
        optProb = Optimization('TP109 Constraint Problem', objfunc)

        # Design Variables
        lower = [0.0, 0.0, -0.55, -0.55, 196, 196, 196, -400, -400]
        upper = [None, None, 0.55, 0.55, 252, 252, 252, 800, 800]
        value = [0, 0, 0, 0, 0, 0, 0, 0, 0]
        optProb.addVarGroup('xvars', 9, lower=lower, upper=upper, value=value)

        # Constraints
        lower = [0, 0, 0, 0, 0, 0, 0, 0]
        upper = [None, None, 0, 0, 0, 0, 0, 0]
        if not USE_LINEAR:
            lower.extend([0, 0])
            upper.extend([None, None])

        optProb.addConGroup('con', len(lower), lower=lower, upper=upper)

        # And the 2 linear constraints
        if USE_LINEAR:
            jac = numpy.zeros((1, 9))
            jac[0, 3] = 1.0
            jac[0, 2] = -1.0
            optProb.addConGroup('lin_con',
                                1,
                                lower=-.55,
                                upper=0.55,
                                wrt=['xvars'],
                                jac={'xvars': jac},
                                linear=True)

        # Objective
        optProb.addObj('obj')

        # Optimizer
        try:
            opt = OPT(optName, options=optOptions)
        except Exception:  # narrowed from a bare except
            raise unittest.SkipTest('Optimizer not available:', optName)

        # Solution (complex-step sensitivities)
        sol = opt(optProb, sens='CS')

        # Check Solution
        self.assertAlmostEqual(sol.objectives['obj'].value,
                               0.536206927538e+04,
                               places=places)
예제 #21
0
    def optimize(self, optName, optOptions=None, storeHistory=False, places=5):
        """Build and solve the 'large and sparse' problem with the named optimizer.

        Parameters
        ----------
        optName : str
            pyOptSparse optimizer name.
        optOptions : dict or None
            Optimizer-specific options; defaults to an empty dict.
        storeHistory : bool
            Unused in this variant; kept for interface compatibility with
            the sibling `optimize` methods.
        places : int
            Decimal places used for the solution checks.
        """
        # Avoid the shared-mutable-default pitfall of `optOptions={}`.
        if optOptions is None:
            optOptions = {}

        # Optimization Object
        optProb = Optimization('large and sparse', objfunc)

        # Design Variables: one scalar plus two banded groups.
        optProb.addVar('x', lower=-100, upper=150, value=0)
        optProb.addVarGroup('y',
                            N,
                            lower=-10 - arange(N),
                            upper=arange(N),
                            value=0)
        optProb.addVarGroup('z',
                            2 * N,
                            upper=arange(2 * N),
                            lower=-100 - arange(2 * N),
                            value=0)

        # Constraints, including a linear group with an explicit sparse Jacobian.
        optProb.addCon('con1', upper=100, wrt=['x'])
        optProb.addCon('con2', upper=100)
        optProb.addCon('con3', lower=4, wrt=['x', 'z'])
        optProb.addConGroup('lincon',
                            N,
                            lower=2 - 3 * arange(N),
                            linear=True,
                            wrt=['x', 'y'],
                            jac={
                                'x': numpy.ones((N, 1)),
                                'y': sparse.spdiags(numpy.ones(N), 0, N, N)
                            })
        optProb.addObj('obj')

        # Optimizer: skip the test rather than fail when it is unavailable.
        # Narrowed from a bare `except:` so real bugs are not swallowed.
        try:
            opt = OPT(optName, options=optOptions)
        except Exception:
            raise unittest.SkipTest('Optimizer not available:', optName)

        # `sens` is a module-level sensitivity setting shared by the tests.
        sol = opt(optProb, sens=sens)

        # Check Solution
        self.assertAlmostEqual(sol.objectives['obj'].value,
                               10.0,
                               places=places)

        self.assertAlmostEqual(sol.variables['x'][0].value, 2.0, places=places)
예제 #22
0
    def optimize(self, optName, optOptions=None, storeHistory=False):
        """Build and solve the HS15 constraint problem with the named optimizer.

        Parameters
        ----------
        optName : str
            pyOptSparse optimizer name.
        optOptions : dict or None
            Optimizer-specific options; defaults to an empty dict.
        storeHistory : bool
            When True, write the optimization history to
            '<optname>_hs015_Hist.hst'.
        """
        # Avoid the shared-mutable-default pitfall of `optOptions={}`.
        if optOptions is None:
            optOptions = {}

        # Optimization Object
        optProb = Optimization('HS15 Constraint Problem', self.objfunc)

        # Design Variables
        lower = [-5, -5]
        upper = [0.5, 5]
        value = [-2, 1]
        optProb.addVarGroup('xvars', 2, lower=lower, upper=upper, value=value)

        # Constraints (lower-bounded only)
        lower = [1, 0]
        upper = [None, None]
        optProb.addConGroup('con', 2, lower=lower, upper=upper)

        # Objective
        optProb.addObj('obj')

        # Check optimization problem:
        # print(optProb)

        # Optimizer: skip the test rather than fail when it is unavailable.
        # Narrowed from a bare `except:` so real bugs are not swallowed.
        try:
            opt = OPT(optName, options=optOptions)
        except Exception:
            raise unittest.SkipTest('Optimizer not available:', optName)

        # Solution
        if storeHistory:
            histFileName = '%s_hs015_Hist.hst' % (optName.lower())
        else:
            histFileName = None

        sol = opt(optProb, sens=self.sens, storeHistory=histFileName)

        # Check Solution against the known HS15 optimum.
        self.assertAlmostEqual(sol.objectives['obj'].value, 306.5)

        self.assertAlmostEqual(sol.variables['xvars'][0].value, 0.5)
        self.assertAlmostEqual(sol.variables['xvars'][1].value, 2.0)
예제 #23
0
    def optimize(self, x0, alg='IPOPT', options=None):
        """Solve the constrained lifting-line problem; return ``(x, sol)``.

        Parameters
        ----------
        x0 : design-variable container accepted by ``self.get_vars``
            Initial design point.
        alg : str
            Optimizer name; only 'IPOPT' is implemented.
        options : dict or None
            Optimizer options; defaults to an empty dict.

        Raises
        ------
        NotImplementedError
            If ``alg`` is not 'IPOPT'.
        """
        # Copy caller options so the argument (previously a shared mutable
        # default) is never mutated.  The original built this merged dict
        # but then passed the raw `options` to OPT; content is identical.
        opt_options = dict(options) if options else {}

        def objfun(xdict):
            # pyOptSparse objective callback: unpack DVs, evaluate objective
            # and both constraint functions.
            x, fail = self.set_vars(xdict)
            funcs = {
                'obj': self.obj(x),
                'llcon': self.lifting_line_const(x),
                'wcon': self.enough_lift_const(x)
            }
            return funcs, fail

        optProb = Optimization('llOpt', objfun)
        ub = self.get_vars(self.bounds.ub, dic=True)
        lb = self.get_vars(self.bounds.lb, dic=True)
        x0 = self.get_vars(x0, dic=True)
        optProb.addVar('V', upper=ub['V'], lower=lb['V'], value=x0['V'])
        optProb.addVar('b', upper=ub['b'], lower=lb['b'], value=x0['b'])
        optProb.addVarGroup('c', self.N_th, upper=ub['c'], lower=lb['c'], value=x0['c'])
        optProb.addVarGroup('al', self.N_th, upper=ub['al'], lower=lb['al'], value=x0['al'])
        optProb.addVarGroup('A', self.N_A, upper=ub['A'], lower=lb['A'], value=x0['A'])
        optProb.addObj('obj')
        # Equality constraints: lifting-line residuals and lift requirement.
        optProb.addConGroup('llcon', self.N_th, lower=0., upper=0.)
        optProb.addCon('wcon', lower=0., upper=0.)

        if alg == 'IPOPT':
            opt = OPT(alg, options=opt_options)
            sol = opt(optProb, sens='FD')
        else:
            raise NotImplementedError(f"No routine for algorithm {alg}")

        # Pull the optimal design variables back out of the solution object.
        D = dict(
            al=[a.value for a in sol.variables['al']],
            c=[a.value for a in sol.variables['c']],
            A=[a.value for a in sol.variables['A']],
            b=sol.variables['b'][0].value,
            V=sol.variables['V'][0].value,
        )
        x = self.set_vars(D)[0]
        return x, sol
예제 #24
0
    def test_dynamic_total_coloring_pyoptsparse_slsqp_auto(self):
        """Dynamic total coloring with pyOptSparse SLSQP matches the uncolored
        answer while reducing linear solves per driver iteration."""
        try:
            from pyoptsparse import OPT
        except ImportError:
            raise unittest.SkipTest("This test requires pyoptsparse.")

        # Narrowed from a bare `except:` so real bugs are not swallowed.
        try:
            OPT('SLSQP')
        except Exception:
            raise unittest.SkipTest("This test requires pyoptsparse SLSQP.")

        # Run with dynamic total coloring enabled.
        p_color = run_opt(pyOptSparseDriver,
                          'auto',
                          optimizer='SLSQP',
                          print_results=False,
                          dynamic_total_coloring=True)
        assert_almost_equal(p_color['circle.area'], np.pi, decimal=7)

        # run w/o coloring
        p = run_opt(pyOptSparseDriver,
                    'auto',
                    optimizer='SLSQP',
                    print_results=False)
        assert_almost_equal(p['circle.area'], np.pi, decimal=7)

        # - coloring saves 16 solves per driver iter  (5 vs 21)
        # - initial solve for linear constraints takes 21 in both cases (only done once)
        # - dynamic case does 3 full compute_totals to compute coloring, which adds 21 * 3 solves
        # - (total_solves - N) / (solves_per_iter) should be equal between the two cases,
        # - where N is 21 for the uncolored case and 21 * 4 for the dynamic colored case.
        self.assertEqual((p.model._solve_count - 21) / 21,
                         (p_color.model._solve_count - 21 * 4) / 5)

        # test __repr__ (strip the trailing-'L' long-int suffix for py2/py3 parity)
        rep = repr(p_color.driver._coloring_info['coloring'])
        self.assertEqual(
            rep.replace('L', ''),
            'Coloring (direction: fwd, ncolors: 5, shape: (22, 21)')
예제 #25
0
def large_sparse(optimizer='SNOPT', optOptions=None):
    """Assemble the 'large and sparse' problem and its optimizer.

    Returns the ``(opt, optProb)`` pair, ready to be solved, after
    printing the constraint-Jacobian sparsity pattern.
    """
    options = optOptions if optOptions is not None else {}

    # Problem definition
    optProb = Optimization('large and sparse', objfunc)

    # Design variables: a scalar plus two banded groups.
    optProb.addVar('x', lower=-100, upper=150, value=0)
    optProb.addVarGroup('y', N,
                        lower=-10 - arange(N), upper=arange(N), value=0)
    optProb.addVarGroup('z', 2 * N,
                        upper=arange(2 * N), lower=-100 - arange(2 * N),
                        value=0)

    # Nonlinear constraints
    optProb.addCon('con1', upper=100, wrt=['x'])
    optProb.addCon('con2', upper=100)
    optProb.addCon('con3', lower=4, wrt=['x', 'z'])

    # Linear constraint group with an explicit sparse Jacobian.
    lin_jac = {
        'x': numpy.ones((N, 1)),
        'y': sparse.spdiags(numpy.ones(N), 0, N, N),
    }
    optProb.addConGroup('lincon', N,
                        lower=2 - 3 * arange(N),
                        linear=True,
                        wrt=['x', 'y'],
                        jac=lin_jac)

    optProb.addObj('obj')

    # Instantiate the requested optimizer and show the sparsity pattern.
    opt = OPT(optimizer, options=options)
    optProb.printSparsity()

    return opt, optProb
예제 #26
0
    def test_snopt_hotstart(self):
        """SNOPT cold solve followed by a hot-started re-solve."""
        self.optName = "SNOPT"
        self.setup_optProb()

        # Cold solve, asking SNOPT to hand back its work arrays.
        cold_sol, work = self.optimize(
            optOptions={"Return work arrays": True})
        self.assert_solution_allclose(cold_sol, 1e-12)
        self.assert_inform_equal(cold_sol)
        # The restart dictionary must contain every SNOPT work array.
        expected_keys = {"cw", "iw", "rw", "xs", "hs", "pi"}
        self.assertEqual(expected_keys, set(work.keys()))

        # Second solve, hot-started from the saved state.
        self.setup_optProb()
        self.nf = 0
        self.ng = 0
        hot_opt = OPT(self.optName,
                      options={"Start": "Hot", "Verify level": -1})
        hot_sol = hot_opt(self.optProb, sens=self.sens, restartDict=work)
        self.assert_solution_allclose(hot_sol, 1e-12)
        # A hot start should converge in one function and one gradient call.
        self.assertEqual(self.nf, 1)
        self.assertEqual(self.ng, 1)
예제 #27
0
    def optimize(self, x0, alg='IPOPT', options=None):
        """Minimize drag-over-lift via the lifting-line model; return ``(x, sol)``.

        Parameters
        ----------
        x0 : design-variable container accepted by ``self.get_vars``
            Initial design point.
        alg : str
            Optimizer name; only 'IPOPT' is implemented.
        options : dict or None
            Optimizer options; defaults to an empty dict.

        Raises
        ------
        NotImplementedError
            If ``alg`` is not 'IPOPT'.
        """
        # Copy caller options so the argument (previously a shared mutable
        # default) is never mutated.  The original built this merged dict
        # but then passed the raw `options` to OPT; content is identical.
        opt_options = dict(options) if options else {}

        def objfun(xdict):
            # pyOptSparse callback: run the lifting-line solve and penalize
            # a failed solve with a large objective value.
            V = xdict['V']
            b = xdict['b']
            c = xdict['c']
            al = xdict['al']
            A, fail = self.ll(V, b, c, al)
            funcs = {
                'obj': 10000. if fail else self.DoverL(V, b, c, al, A)
            }
            return funcs, fail

        optProb = Optimization('llOpt', objfun)
        ub = self.get_vars(self.bounds.ub, dic=True)
        lb = self.get_vars(self.bounds.lb, dic=True)
        x0 = self.get_vars(x0, dic=True)
        optProb.addVar('V', upper=ub['V'], lower=lb['V'], value=x0['V'])
        optProb.addVar('b', upper=ub['b'], lower=lb['b'], value=x0['b'])
        optProb.addVarGroup('c', self.N_th, upper=ub['c'], lower=lb['c'], value=x0['c'])
        optProb.addVarGroup('al', self.N_th, upper=ub['al'], lower=lb['al'], value=x0['al'])
        optProb.addObj('obj')

        if alg == 'IPOPT':
            opt = OPT(alg, options=opt_options)
            sol = opt(optProb, sens='FD')
        else:
            raise NotImplementedError(f"No routine for algorithm {alg}")

        # Recover the optimal design variables from the solution object.
        D = dict(
            al=[a.value for a in sol.variables['al']],
            c=[a.value for a in sol.variables['c']],
            b=sol.variables['b'][0].value,
            V=sol.variables['V'][0].value,
        )
        x = self.set_vars(D)[0]
        return x, sol
예제 #28
0
from openmdao.test.example_groups import ExampleGroup
from openmdao.test.sellar import SellarDerivativesGrouped
from openmdao.test.util import assert_rel_error
from openmdao.util.record_util import format_iteration_coordinate

from openmdao.recorders.sqlite_recorder import format_version

# check that pyoptsparse is installed
# if it is, try to use SLSQP
OPT = None
OPTIMIZER = None

try:
    from pyoptsparse import OPT
except ImportError:
    # pyoptsparse is not installed at all; leave OPT/OPTIMIZER as None.
    pass
else:
    # pyoptsparse imported; check whether the SLSQP backend is usable.
    # Narrowed from a bare `except:` so unrelated bugs are not swallowed.
    try:
        OPT('SLSQP')
        OPTIMIZER = 'SLSQP'
    except Exception:
        pass

if OPTIMIZER:
    from openmdao.drivers.pyoptsparse_driver import pyOptSparseDriver

def run_problem(problem):
    """Run *problem* and return its (start, end) wall-clock timestamps."""
    start = time.time()
    problem.run()
    finish = time.time()
    return start, finish
예제 #29
0
    for i in xrange(nFlowCases):
        optProb.addCon('fc%d_CL_con' % i, lower=0.0, upper=0.0, scale=1.0)

    if gcomm.rank == 0:
        print(optProb)

    # The MP object needs the 'obj' and 'sens' function for each proc set,
    # the optimization problem and what the objcon function is:
    MP.setProcSetObjFunc(optVars[0] + '_mp', aeroFuncsMP)
    MP.setProcSetSensFunc(optVars[0] + '_mp', aeroFuncsSensMP)
    MP.setObjCon(objCon)
    MP.setOptProb(optProb)
    optProb.printSparsity()

    # Make Instance of Optimizer
    opt = OPT(args.opt, options=optOptions)

    # Run Optimization
    histFile = os.path.join(outputDirectory, '%s_hist.hst' % args.opt)
    sol = opt(optProb, MP.sens, storeHistory=histFile)
    if gcomm.rank == 0:
        print(sol)

elif task.lower() == 'solvecl':

    CFDSolver.setOption('usecoloring', False)

    xDV0 = DVGeo.getValues()
    alpha0 = xDV0['alpha']

    for i in range(10):
예제 #30
0
# Wire the shared objective-function helpers to this case's solver state.
# NOTE(review): optFuncs/evalFuncs/gcomm are defined earlier in the script.
optFuncs.evalFuncs = evalFuncs
optFuncs.gcomm = gcomm
optFuncs.setMultiPointCondition = setMultiPointCondition
optFuncs.setMultiPointObjFuncs = setMultiPointObjFuncs
optFuncs.setMultiPointObjFuncsSens = setMultiPointObjFuncsSens

# Optimize
DASolver.runColoring()
optProb = Optimization("opt", optFuncs.calcObjFuncValuesMP, comm=gcomm)
# Geometry design variables and geometric constraints come from the
# DVGeo/DVCon objects set up earlier.
DVGeo.addVariablesPyOpt(optProb)
DVCon.addConstraintsPyOpt(optProb)
# Add objective
optProb.addObj("CD", scale=1)
# Add physical constraints (equalities: lower == upper == target)
optProb.addCon("CL", lower=CL_target, upper=CL_target, scale=1)
optProb.addCon("CMZ", lower=CM_target, upper=CM_target, scale=1)

# Only the root rank prints, to avoid duplicate output under MPI.
if gcomm.rank == 0:
    print(optProb)

# Solve with SLSQP, recording the history alongside the script.
opt = OPT("slsqp", options=optOptions)
histFile = os.path.join("./", "slsqp_hist.hst")
sol = opt(optProb, sens=optFuncs.calcObjFuncSensMP, storeHistory=histFile)

if gcomm.rank == 0:
    print(sol)

# Write the optimal design variables for regression checking (root only).
xDVs = DVGeo.getValues()
if gcomm.rank == 0:
    reg_write_dict(xDVs, 1e-6, 1e-8)
예제 #31
0
    def __call__(self, optimizer, options=None):
        """Run the optimization with the given pyOptSparse optimizer.

        Parameters
        ----------
        optimizer : str
            Name of the pyOptSparse optimizer to instantiate.
        options : dict or None
            Optimizer-specific options; defaults to an empty dict.

        Side effects: writes the history to 'hist.hst' and sets
        ``self.exit_flag`` to 1 on success, 0 on failure.
        """
        system = self._system
        variables = self._variables

        opt_prob = OptProblem('Optimization', self.obj_func)

        # Register every design-variable group with the problem.
        for dv_name in variables['dv'].keys():
            dv = variables['dv'][dv_name]
            dv_id = dv['ID']
            if dv['value'] is not None:
                value = dv['value']
            else:
                # Fall back to the current state in the unknowns vector.
                value = system.vec['u'](dv_id)
            scale = dv['scale']
            lower = dv['lower']
            upper = dv['upper']
            size = system.vec['u'](dv_id).shape[0]
            opt_prob.addVarGroup(dv_name, size, value=value, scale=scale,
                                 lower=lower, upper=upper)
        opt_prob.finalizeDesignVariables()

        # Register functions: unbounded entries become objectives, the
        # rest become constraint groups (with Jacobian/wrt info if known).
        for func_name in variables['func'].keys():
            func = variables['func'][func_name]
            func_id = func['ID']
            lower = func['lower']
            upper = func['upper']
            linear = func['linear']
            get_jacs = func['get_jacs']
            sys = func['sys']
            size = system.vec['u'](func_id).shape[0]
            if lower is None and upper is None:
                opt_prob.addObj(func_name)
            else:
                if get_jacs is not None:
                    # Caller supplies an explicit (possibly linear) Jacobian.
                    jacs_var = get_jacs()

                    dv_names = []
                    jacs = {}
                    for dv_var in jacs_var:
                        dv_id = self._system.get_id(dv_var)
                        dv_name = self._get_name(dv_id)
                        dv_names.append(dv_name)
                        jacs[dv_name] = jacs_var[dv_var]

                    opt_prob.addConGroup(func_name, size,
                                         wrt=dv_names,
                                         jac=jacs, linear=linear,
                                         lower=lower, upper=upper)
                elif sys is not None:
                    # Constrain only w.r.t. DVs owned by this subsystem.
                    dv_names = []
                    for dv_name in variables['dv'].keys():
                        dv_id = variables['dv'][dv_name]['ID']
                        if dv_id in sys.vec['u']:
                            dv_names.append(dv_name)
                    opt_prob.addConGroup(func_name, size,
                                         wrt=dv_names,
                                         lower=lower, upper=upper)
                else:
                    # Dense constraint w.r.t. all design variables.
                    opt_prob.addConGroup(func_name, size,
                                         lower=lower, upper=upper)

        if options is None:
            options = {}

        opt = Optimizer(optimizer, options=options)
        opt.setOption('Iterations limit', int(1e6))
        #opt.setOption('Verify level', 3)
        sol = opt(opt_prob, sens=self.sens_func, storeHistory='hist.hst')
        # Function-call form works on both Python 2 and 3; the original
        # used a Python 2 `print` statement, which is a py3 syntax error.
        print(sol)

        try:
            exit_status = sol.optInform['value']
            self.exit_flag = 1
            if exit_status > 2:  # codes > 2 indicate a bad exit
                self.exit_flag = 0
        except KeyError:
            # optInform not populated -> the run failed before finishing.
            self.exit_flag = 0
예제 #32
0
# Lift-coefficient equality constraint for this aero point.
optProb.addCon("cl_con_" + ap.name, lower=0.0, upper=0.0, scale=10.0)

# The MP object needs the 'obj' and 'sens' function for each proc set,
# the optimization problem and what the objcon function is:
MP.setProcSetObjFunc("cruise", cruiseFuncs)
MP.setProcSetSensFunc("cruise", cruiseFuncsSens)
MP.setObjCon(objCon)
MP.setOptProb(optProb)
optProb.printSparsity()
# rst optprob (end)
# rst optimizer
# Set up optimizer
optimizer = "SLSQP"
if optimizer == "SLSQP":
    optOptions = {"IFILE": os.path.join(outputDirectory, "SLSQP.out")}
    opt = OPT("slsqp", options=optOptions)
elif optimizer == "SNOPT":
    optOptions = {
        "Major feasibility tolerance": 1e-4,
        "Major optimality tolerance": 1e-4,
        "Difference interval": 1e-3,
        "Hessian full memory": None,
        "Function precision": 1e-8,
        "Print file": os.path.join(outputDirectory, "SNOPT_print.out"),
        "Summary file": os.path.join(outputDirectory, "SNOPT_summary.out"),
        "Major iterations limit": 1000,
    }
    opt = OPT("snopt", options=optOptions)
else:
    # Guard: without this, `opt` would be undefined at the call below
    # for any other optimizer name.
    raise ValueError("Unsupported optimizer: %s" % optimizer)

# Run Optimization
sol = opt(optProb, MP.sens, storeHistory=os.path.join(outputDirectory, "opt.hst"))