    def test_sens(self):
        termcomp = TerminateComp(max_sens=3)
        optProb = Optimization("Paraboloid", termcomp.objfunc)

        optProb.addVarGroup("x", 1, type="c", lower=-50.0, upper=50.0, value=0.0)
        optProb.addVarGroup("y", 1, type="c", lower=-50.0, upper=50.0, value=0.0)
        optProb.finalizeDesignVariables()

        optProb.addObj("obj")

        optProb.addConGroup("con", 1, lower=-15.0, upper=-15.0, wrt=["x", "y"], linear=True, jac=con_jac)

        test_name = "SNOPT_user_termination_sens"
        optOptions = {
            "Print file": "{}.out".format(test_name),
            "Summary file": "{}_summary.out".format(test_name),
        }
        try:
            opt = SNOPT(options=optOptions)
        except Error:
            raise unittest.SkipTest("Optimizer not available: SNOPT")

        sol = opt(optProb, sens=termcomp.sens)

        self.assertEqual(termcomp.sens_count, 4)

        # Exit code for user requested termination.
        self.assertEqual(sol.optInform["value"], 71)
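
A minimal sketch of the TerminateComp helper these termination tests rely on (it is not shown on this page). It assumes pyOptsparse's convention that returning fail = 2 from an objective or sensitivity callback requests user termination, which SNOPT reports as inform 71; the class body below is reconstructed from how the tests use it and should be treated as an assumption, not the original helper.

class TerminateComp(object):
    """Count calls and request termination once a budget is exceeded."""

    def __init__(self, max_obj=1000, max_sens=1000):
        self.max_obj = max_obj      # objective evaluations before terminating
        self.max_sens = max_sens    # gradient evaluations before terminating
        self.obj_count = 0
        self.sens_count = 0

    def objfunc(self, xdict):
        x, y = xdict["x"], xdict["y"]
        funcs = {"obj": (x - 3.0) ** 2 + x * y + (y + 4.0) ** 2 - 3.0,
                 "con": -x + y}
        self.obj_count += 1
        # fail = 2 asks pyOptsparse to terminate the optimization, so the
        # final count is max_obj + 1 (which is what the tests assert).
        fail = 2 if self.obj_count > self.max_obj else False
        return funcs, fail

    def sens(self, xdict, funcs):
        x, y = xdict["x"], xdict["y"]
        funcsSens = {"obj": {"x": 2.0 * x - 6.0 + y,
                             "y": 2.0 * y + 8.0 + x}}
        self.sens_count += 1
        fail = 2 if self.sens_count > self.max_sens else False
        return funcsSens, fail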
Example #2

    def __call__(self, optimizer, options=None):
        """ Run optimization """
        system = self._system
        variables = self._variables

        opt_prob = OptProblem('Optimization', self.obj_func)
        for dv_name in variables['dv'].keys():
            dv_id = variables['dv'][dv_name]['ID']
            value = variables['dv'][dv_name]['value']
            lower = variables['dv'][dv_name]['lower']
            upper = variables['dv'][dv_name]['upper']
            size = system(dv_id).size
            opt_prob.addVarGroup(dv_name, size, value=value,
                                 lower=lower, upper=upper)
        opt_prob.finalizeDesignVariables()
        for func_name in variables['func'].keys():
            lower = variables['func'][func_name]['lower']
            upper = variables['func'][func_name]['upper']
            if lower is None and upper is None:
                opt_prob.addObj(func_name)
            else:
                opt_prob.addCon(func_name, lower=lower, upper=upper)

        if options is None:
            options = {}

        opt = Optimizer(optimizer, options=options)
        sol = opt(opt_prob, sens=self.sens_func)
        print(sol)
Example #3

    def test_opt_bug1(self):
        # Due to a new feature, there is a TypeError when you optimize a model without a constraint.
        optProb = Optimization("Paraboloid", objfunc_no_con)

        # Design Variables
        optProb.addVarGroup("x", 1, type="c", lower=-50.0, upper=50.0, value=0.0)
        optProb.addVarGroup("y", 1, type="c", lower=-50.0, upper=50.0, value=0.0)
        optProb.finalizeDesignVariables()

        # Objective
        optProb.addObj("obj")

        test_name = "bugfix_SNOPT_bug1"
        optOptions = {
            "Major feasibility tolerance": 1e-1,
            "Print file": "{}.out".format(test_name),
            "Summary file": "{}_summary.out".format(test_name),
        }

        # Optimizer
        try:
            opt = SNOPT(options=optOptions)
        except Error:
            raise unittest.SkipTest("Optimizer not available: SNOPT")

        opt(optProb, sens=sens)
Esempio n. 4
0
    def test_opt(self):
        # Optimization Object
        optProb = Optimization('Paraboloid', objfunc)

        # Design Variables
        optProb.addVarGroup('x', 1, type='c', lower=-50.0, upper=50.0, value=0.0)
        optProb.addVarGroup('y', 1, type='c', lower=-50.0, upper=50.0, value=0.0)
        optProb.finalizeDesignVariables()

        # Objective
        optProb.addObj('obj')

        # Equality Constraint
        optProb.addConGroup('con', 1, lower=-15.0, upper=-15.0, wrt=['x', 'y'], linear=True, jac=con_jac)

        # Check optimization problem:
        print(optProb)

        # Optimizer
        try:
            opt = SNOPT(optOptions={'Major feasibility tolerance': 1e-1})
        except Error:
            raise unittest.SkipTest('Optimizer not available: SNOPT')

        sol = opt(optProb, sens=sens)

        # Check Solution 7.166667, -7.833334
        self.assertAlmostEqual(sol.variables['x'][0].value, 7.166667, places=6)
        self.assertAlmostEqual(sol.variables['y'][0].value, -7.833333, places=6)
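
The module-level helpers objfunc, sens, and con_jac used throughout these Paraboloid tests are not shown on this page. A minimal sketch, inferred from the checked optimum (7.166667, -7.833333) of a paraboloid minimized subject to the linear constraint -x + y = -15; the exact coefficients are an assumption consistent with that optimum.

import numpy as np

def objfunc(xdict):
    # Paraboloid objective plus the value of the linear constraint.
    x = xdict["x"]
    y = xdict["y"]
    funcs = {}
    funcs["obj"] = (x - 3.0) ** 2 + x * y + (y + 4.0) ** 2 - 3.0
    funcs["con"] = -x + y
    fail = False
    return funcs, fail

def sens(xdict, funcs):
    # Analytic objective gradient; the constraint Jacobian is constant
    # and supplied separately through jac=con_jac, so it is omitted here.
    x = xdict["x"]
    y = xdict["y"]
    funcsSens = {"obj": {"x": 2.0 * x - 6.0 + y,
                         "y": 2.0 * y + 8.0 + x}}
    fail = False
    return funcsSens, fail

# Constant Jacobian of the linear constraint con = -x + y.
con_jac = {"x": np.array(-1.0), "y": np.array(1.0)}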
Example #5
    def __call__(self, optimizer, options=None):
        """ Run optimization """
        system = self._system
        variables = self._variables

        opt_prob = OptProblem('Optimization', self.obj_func)
        for dv_name in variables['dv'].keys():
            dv_id = variables['dv'][dv_name]['ID']
            value = variables['dv'][dv_name]['value']
            lower = variables['dv'][dv_name]['lower']
            upper = variables['dv'][dv_name]['upper']
            size = system(dv_id).size
            opt_prob.addVarGroup(dv_name,
                                 size,
                                 value=value,
                                 lower=lower,
                                 upper=upper)
        opt_prob.finalizeDesignVariables()
        for func_name in variables['func'].keys():
            lower = variables['func'][func_name]['lower']
            upper = variables['func'][func_name]['upper']
            if lower is None and upper is None:
                opt_prob.addObj(func_name)
            else:
                opt_prob.addCon(func_name, lower=lower, upper=upper)

        if options is None:
            options = {}

        opt = Optimizer(optimizer, options=options)
        sol = opt(opt_prob, sens=self.sens_func)
        print(sol)
Example #6

    def test_opt(self):
        # Optimization Object
        optProb = Optimization("Paraboloid", objfunc)

        # Design Variables
        optProb.addVarGroup("x",
                            1,
                            type="c",
                            lower=-50.0,
                            upper=50.0,
                            value=0.0)
        optProb.addVarGroup("y",
                            1,
                            type="c",
                            lower=-50.0,
                            upper=50.0,
                            value=0.0)
        optProb.finalizeDesignVariables()

        # Objective
        optProb.addObj("obj")

        # Equality Constraint
        optProb.addConGroup("con",
                            1,
                            lower=-15.0,
                            upper=-15.0,
                            wrt=["x", "y"],
                            linear=True,
                            jac=con_jac)

        # Check optimization problem:
        print(optProb)
        test_name = "bugfix_SNOPT_test_opt"
        optOptions = {
            "Major feasibility tolerance": 1e-1,
            "Print file": "{}.out".format(test_name),
            "Summary file": "{}_summary.out".format(test_name),
        }

        # Optimizer
        try:
            opt = SNOPT(options=optOptions)
        except Error:
            raise unittest.SkipTest("Optimizer not available: SNOPT")

        sol = opt(optProb, sens=sens)

        # Check Solution 7.166667, -7.833334
        tol = 1e-6
        assert_allclose(sol.variables["x"][0].value,
                        7.166667,
                        atol=tol,
                        rtol=tol)
        assert_allclose(sol.variables["y"][0].value,
                        -7.833333,
                        atol=tol,
                        rtol=tol)
Example #7
    def __call__(self, optimizer, options=None):
        """ Run optimization """
        system = self._system
        variables = self._variables

        opt_prob = OptProblem('Optimization', self.obj_func)
        for dv_name in variables['dv'].keys():
            dv = variables['dv'][dv_name]
            dv_id = dv['ID']
            value = dv['value']
            lower = dv['lower']
            upper = dv['upper']
            size = system.vec['u'](dv_id).shape[0]
            opt_prob.addVarGroup(dv_name, size, value=value,
                                 lower=lower, upper=upper)
        opt_prob.finalizeDesignVariables()
        for func_name in variables['func'].keys():
            func = variables['func'][func_name]
            func_id = func['ID']
            lower = func['lower']
            upper = func['upper']
            linear = func['linear']
            get_jacs = func['get_jacs']
            size = system.vec['u'](func_id).shape[0]
            if lower is None and upper is None:
                opt_prob.addObj(func_name)
            else:
                if func['get_jacs'] is None:
                    opt_prob.addConGroup(func_name, size,
                                         lower=lower, upper=upper)
                else:
                    jacs_var = get_jacs()

                    dv_names = []
                    jacs = {}
                    for dv_var in jacs_var:
                        dv_id = self._system.get_id(dv_var)
                        dv_name = self._get_name(dv_id)
                        dv_names.append(dv_name)
                        jacs[dv_name] = jacs_var[dv_var]

                    opt_prob.addConGroup(func_name, size,
                                         wrt=dv_names,
                                         jac=jacs, linear=linear,
                                         lower=lower, upper=upper)

        if options is None:
            options = {}

        opt = Optimizer(optimizer, options=options)
        opt.setOption('Iterations limit', int(1e6))
        #opt.setOption('Verify level', 3)
        sol = opt(opt_prob, sens=self.sens_func, storeHistory='hist.hst')
        print(sol)
Example #8

    def test_opt_bug_print_2con(self):
        # Optimization Object
        optProb = Optimization("Paraboloid", objfunc_2con)

        # Design Variables
        optProb.addVarGroup("x", 1, type="c", lower=-50.0, upper=50.0, value=0.0)
        optProb.addVarGroup("y", 1, type="c", lower=-50.0, upper=50.0, value=0.0)
        optProb.finalizeDesignVariables()

        # Objective
        optProb.addObj("obj")

        con_jac2 = {}
        con_jac2["x"] = -np.ones((2, 1))
        con_jac2["y"] = np.ones((2, 1))

        con_jac3 = {}
        con_jac3["x"] = -np.ones((3, 1))
        con_jac3["y"] = np.ones((3, 1))

        # Equality Constraint
        optProb.addConGroup("con", 2, lower=-15.0, upper=-15.0, wrt=["x", "y"], linear=True, jac=con_jac2)
        optProb.addConGroup("con2", 3, lower=-15.0, upper=-15.0, wrt=["x", "y"], linear=True, jac=con_jac3)

        # Check optimization problem:
        print(optProb)

        test_name = "bugfix_SNOPT_bug_print_2con"
        optOptions = {
            "Major feasibility tolerance": 1e-1,
            "Print file": "{}.out".format(test_name),
            "Summary file": "{}_summary.out".format(test_name),
        }

        # Optimizer
        try:
            opt = SNOPT(options=optOptions)
        except Error:
            raise unittest.SkipTest("Optimizer not available: SNOPT")

        sol = opt(optProb, sens=sens)

        print(sol)
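
objfunc_2con is likewise not shown. A hedged sketch consistent with the 2- and 3-row constraint groups and their ±1 Jacobians defined above, assuming the same paraboloid objective as the other tests on this page:

import numpy as np

def objfunc_2con(xdict):
    x = xdict["x"]
    y = xdict["y"]
    funcs = {}
    funcs["obj"] = (x - 3.0) ** 2 + x * y + (y + 4.0) ** 2 - 3.0
    # Replicate the linear relation over the 2- and 3-element groups,
    # matching the (2, 1) and (3, 1) Jacobians built in the test.
    conval = -x + y
    funcs["con"] = conval * np.ones(2)
    funcs["con2"] = conval * np.ones(3)
    fail = False
    return funcs, fail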
Example #9
    def test_opt_bug1(self):
        # Due to a new feature, there is a TypeError when you optimize a model without a constraint.
        optProb = Optimization('Paraboloid', objfunc_no_con)

        # Design Variables
        optProb.addVarGroup('x', 1, type='c', lower=-50.0, upper=50.0, value=0.0)
        optProb.addVarGroup('y', 1, type='c', lower=-50.0, upper=50.0, value=0.0)
        optProb.finalizeDesignVariables()

        # Objective
        optProb.addObj('obj')

        # Optimizer
        try:
            opt = SNOPT(optOptions={'Major feasibility tolerance': 1e-1})
        except Error:
            raise unittest.SkipTest('Optimizer not available: SNOPT')

        sol = opt(optProb, sens=sens)
Example #10
    def test_obj(self):
        termcomp = TerminateComp(max_obj=2)
        optProb = Optimization('Paraboloid', termcomp.objfunc)

        optProb.addVarGroup('x',
                            1,
                            type='c',
                            lower=-50.0,
                            upper=50.0,
                            value=0.0)
        optProb.addVarGroup('y',
                            1,
                            type='c',
                            lower=-50.0,
                            upper=50.0,
                            value=0.0)
        optProb.finalizeDesignVariables()

        optProb.addObj('obj')

        optProb.addConGroup('con',
                            1,
                            lower=-15.0,
                            upper=-15.0,
                            wrt=['x', 'y'],
                            linear=True,
                            jac=con_jac)

        try:
            opt = SNOPT()
        except Error:
            raise unittest.SkipTest('Optimizer not available: SNOPT')

        sol = opt(optProb, sens=termcomp.sens)

        self.assertEqual(termcomp.obj_count, 3)

        # Exit code for user requested termination.
        self.assertEqual(sol.optInform['value'][0], 71)
Example #11
    def test_opt_bug_print_2con(self):
        # Optimization Object
        optProb = Optimization('Paraboloid', objfunc_2con)

        # Design Variables
        optProb.addVarGroup('x', 1, type='c', lower=-50.0, upper=50.0, value=0.0)
        optProb.addVarGroup('y', 1, type='c', lower=-50.0, upper=50.0, value=0.0)
        optProb.finalizeDesignVariables()

        # Objective
        optProb.addObj('obj')

        con_jac2 = {}
        con_jac2['x'] = -np.ones((2, 1))
        con_jac2['y'] = np.ones((2, 1))

        con_jac3 = {}
        con_jac3['x'] = -np.ones((3, 1))
        con_jac3['y'] = np.ones((3, 1))

        # Equality Constraint
        optProb.addConGroup('con', 2, lower=-15.0, upper=-15.0, wrt=['x', 'y'], linear=True, jac=con_jac2)
        optProb.addConGroup('con2', 3, lower=-15.0, upper=-15.0, wrt=['x', 'y'], linear=True, jac=con_jac3)

        # Check optimization problem:
        print(optProb)

        # Optimizer
        try:
            opt = SNOPT(optOptions={'Major feasibility tolerance': 1e-1})
        except Error:
            raise unittest.SkipTest('Optimizer not available: SNOPT')

        sol = opt(optProb, sens=sens)

        print(sol)
Example #12
    def run(self, problem):
        """pyOpt execution. Note that pyOpt controls the execution, and the
        individual optimizers (e.g., SNOPT) control the iteration.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """

        self.pyopt_solution = None
        rel = problem.root._relevance

        # Metadata Setup
        self.metadata = create_local_meta(None, self.options['optimizer'])
        self.iter_count = 0
        update_local_meta(self.metadata, (self.iter_count,))

        # Initial Run
        problem.root.solve_nonlinear(metadata=self.metadata)

        opt_prob = Optimization(self.options['title'], self.objfunc)

        # Add all parameters
        param_meta = self.get_param_metadata()
        param_list = list(iterkeys(param_meta))
        param_vals = self.get_params()
        for name, meta in iteritems(param_meta):
            opt_prob.addVarGroup(name, meta['size'], type='c',
                                 value=param_vals[name],
                                 lower=meta['low'], upper=meta['high'])

        opt_prob.finalizeDesignVariables()

        # Add all objectives
        objs = self.get_objectives()
        self.quantities = list(iterkeys(objs))
        for name in objs:
            opt_prob.addObj(name)

        # Calculate and save gradient for any linear constraints.
        lcons = self.get_constraints(lintype='linear').values()
        if len(lcons) > 0:
            self.lin_jacs = problem.calc_gradient(param_list, lcons,
                                                  return_format='dict')
            #print("Linear Gradient")
            #print(self.lin_jacs)

        # Add all equality constraints
        econs = self.get_constraints(ctype='eq', lintype='nonlinear')
        con_meta = self.get_constraint_metadata()
        self.quantities += list(iterkeys(econs))
        for name in econs:
            size = con_meta[name]['size']
            lower = np.zeros((size))
            upper = np.zeros((size))

            # Sparsify Jacobian via relevance
            wrt = rel.relevant[name].intersection(param_list)

            if con_meta[name]['linear'] is True:
                opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                     linear=True, wrt=wrt,
                                     jac=self.lin_jacs[name])
            else:
                opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                     wrt=wrt)

        # Add all inequality constraints
        incons = self.get_constraints(ctype='ineq', lintype='nonlinear')
        self.quantities += list(iterkeys(incons))
        for name in incons:
            size = con_meta[name]['size']
            upper = np.zeros((size))

            # Sparsify Jacobian via relevance
            wrt = rel.relevant[name].intersection(param_list)

            if con_meta[name]['linear'] is True:
                opt_prob.addConGroup(name, size, upper=upper, linear=True,
                                     wrt=wrt, jac=self.lin_jacs[name])
            else:
                opt_prob.addConGroup(name, size, upper=upper, wrt=wrt)

        # TODO: Support double-sided constraints in openMDAO
        # Add all double_sided constraints
        #for name, con in iteritems(self.get_2sided_constraints()):
            #size = con_meta[name]['size']
            #upper = con.high * np.ones((size))
            #lower = con.low * np.ones((size))
            #name = '%s.out0' % con.pcomp_name
            #if con.linear is True:
                #opt_prob.addConGroup(name,
                #size, upper=upper, lower=lower,
                                     #linear=True, wrt=param_list,
                                     #jac=self.lin_jacs[name])
            #else:
                #opt_prob.addConGroup(name,
                #                     size, upper=upper, lower=lower)

        # Instantiate the requested optimizer
        optimizer = self.options['optimizer']
        try:
            exec('from pyoptsparse import %s' % optimizer)
        except ImportError:
            msg = "Optimizer %s is not available in this installation." % \
                   optimizer
            raise ImportError(msg)

        optname = vars()[optimizer]
        opt = optname()

        #Set optimization options
        for option, value in self.opt_settings.items():
            opt.setOption(option, value)

        self._problem = problem

        # Execute the optimization problem
        if self.options['pyopt_diff'] is True:
            # Use pyOpt's internal finite difference
            fd_step = problem.root.fd_options['step_size']
            sol = opt(opt_prob, sens='FD', sensStep=fd_step)
        else:
            # Use OpenMDAO's differentiator for the gradient
            sol = opt(opt_prob, sens=self.gradfunc)

        self._problem = None

        # Print results
        if self.options['print_results'] is True:
            print(sol)

        # Pull optimal parameters back into framework and re-run, so that
        # framework is left in the right final state
        dv_dict = sol.getDVs()
        for name in self.get_params():
            val = dv_dict[name]
            self.set_param(name, val)

        self.root.solve_nonlinear(metadata=self.metadata)

        # Save the most recent solution.
        self.pyopt_solution = sol
        try:
            exit_status = sol.optInform['value']
            self.exit_flag = 1
            if exit_status > 2: # bad
                self.exit_flag = 0
        except KeyError: #nothing is here, so something bad happened!
            self.exit_flag = 0
Example #13
    def run(self, problem):
        """pyOpt execution. Note that pyOpt controls the execution, and the
        individual optimizers (e.g., SNOPT) control the iteration.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """

        self.pyopt_solution = None
        rel = problem.root._probdata.relevance

        # Metadata Setup
        self.metadata = create_local_meta(None, self.options['optimizer'])
        self.iter_count = 0
        update_local_meta(self.metadata, (self.iter_count,))

        # Initial Run
        problem.root.solve_nonlinear(metadata=self.metadata)

        opt_prob = Optimization(self.options['title'], self._objfunc)

        # Add all parameters
        param_meta = self.get_desvar_metadata()
        self.indep_list = indep_list = list(iterkeys(param_meta))
        param_vals = self.get_desvars()

        for name, meta in iteritems(param_meta):
            opt_prob.addVarGroup(name, meta['size'], type='c',
                                 value=param_vals[name],
                                 lower=meta['lower'], upper=meta['upper'])

        opt_prob.finalizeDesignVariables()

        # Add all objectives
        objs = self.get_objectives()
        self.quantities = list(iterkeys(objs))
        self.sparsity = OrderedDict()
        for name in objs:
            opt_prob.addObj(name)
            self.sparsity[name] = self.indep_list

        # Calculate and save gradient for any linear constraints.
        lcons = self.get_constraints(lintype='linear').keys()
        if len(lcons) > 0:
            self.lin_jacs = problem.calc_gradient(indep_list, lcons,
                                                  return_format='dict')
            #print("Linear Gradient")
            #print(self.lin_jacs)

        # Add all equality constraints
        econs = self.get_constraints(ctype='eq', lintype='nonlinear')
        con_meta = self.get_constraint_metadata()
        self.quantities += list(iterkeys(econs))

        for name in self.get_constraints(ctype='eq'):
            size = con_meta[name]['size']
            lower = upper = con_meta[name]['equals']

            # Sparsify Jacobian via relevance
            wrt = rel.relevant[name].intersection(indep_list)
            self.sparsity[name] = wrt

            if con_meta[name]['linear'] is True:
                opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                     linear=True, wrt=wrt,
                                     jac=self.lin_jacs[name])
            else:
                opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                     wrt=wrt)

        # Add all inequality constraints
        incons = self.get_constraints(ctype='ineq', lintype='nonlinear')
        self.quantities += list(iterkeys(incons))

        for name in self.get_constraints(ctype='ineq'):
            size = con_meta[name]['size']

            # Bounds - double sided is supported
            lower = con_meta[name]['lower']
            upper = con_meta[name]['upper']

            # Sparsify Jacobian via relevance
            wrt = rel.relevant[name].intersection(indep_list)
            self.sparsity[name] = wrt

            if con_meta[name]['linear'] is True:
                opt_prob.addConGroup(name, size, upper=upper, lower=lower,
                                     linear=True, wrt=wrt,
                                     jac=self.lin_jacs[name])
            else:
                opt_prob.addConGroup(name, size, upper=upper, lower=lower,
                                     wrt=wrt)

        # Instantiate the requested optimizer
        optimizer = self.options['optimizer']
        try:
            exec('from pyoptsparse import %s' % optimizer)
        except ImportError:
            msg = "Optimizer %s is not available in this installation." % \
                   optimizer
            raise ImportError(msg)

        optname = vars()[optimizer]
        opt = optname()

        #Set optimization options
        for option, value in self.opt_settings.items():
            opt.setOption(option, value)

        self._problem = problem

        # Execute the optimization problem
        if self.options['pyopt_diff'] is True:
            # Use pyOpt's internal finite difference
            fd_step = problem.root.fd_options['step_size']
            sol = opt(opt_prob, sens='FD', sensStep=fd_step, storeHistory=self.hist_file)
        else:
            # Use OpenMDAO's differentiator for the gradient
            sol = opt(opt_prob, sens=self._gradfunc, storeHistory=self.hist_file)

        self._problem = None

        # Print results
        if self.options['print_results'] is True:
            print(sol)

        # Pull optimal parameters back into framework and re-run, so that
        # framework is left in the right final state
        dv_dict = sol.getDVs()
        for name in indep_list:
            val = dv_dict[name]
            self.set_desvar(name, val)

        self.root.solve_nonlinear(metadata=self.metadata)

        # Save the most recent solution.
        self.pyopt_solution = sol
        try:
            exit_status = sol.optInform['value']
            self.exit_flag = 1
            if exit_status > 2: # bad
                self.exit_flag = 0
        except KeyError: #nothing is here, so something bad happened!
            self.exit_flag = 0
Example #14
    def __call__(self, optimizer, options=None):
        """ Run optimization """
        system = self._system
        variables = self._variables

        opt_prob = OptProblem('Optimization', self.obj_func)
        for dv_name in variables['dv'].keys():
            dv = variables['dv'][dv_name]
            dv_id = dv['ID']
            value = dv['value']
            lower = dv['lower']
            upper = dv['upper']
            size = system.vec['u'](dv_id).shape[0]
            opt_prob.addVarGroup(dv_name,
                                 size,
                                 value=value,
                                 lower=lower,
                                 upper=upper)
        opt_prob.finalizeDesignVariables()
        for func_name in variables['func'].keys():
            func = variables['func'][func_name]
            func_id = func['ID']
            lower = func['lower']
            upper = func['upper']
            linear = func['linear']
            get_jacs = func['get_jacs']
            size = system.vec['u'](func_id).shape[0]
            if lower is None and upper is None:
                opt_prob.addObj(func_name)
            else:
                if func['get_jacs'] is None:
                    opt_prob.addConGroup(func_name,
                                         size,
                                         lower=lower,
                                         upper=upper)
                else:
                    jacs_var = get_jacs()

                    dv_names = []
                    jacs = {}
                    for dv_var in jacs_var:
                        dv_id = self._system.get_id(dv_var)
                        dv_name = self._get_name(dv_id)
                        dv_names.append(dv_name)
                        jacs[dv_name] = jacs_var[dv_var]

                    opt_prob.addConGroup(func_name,
                                         size,
                                         wrt=dv_names,
                                         jac=jacs,
                                         linear=linear,
                                         lower=lower,
                                         upper=upper)

        if options is None:
            options = {}

        opt = Optimizer(optimizer, options=options)
        opt.setOption('Iterations limit', int(1e6))
        #opt.setOption('Verify level', 3)
        sol = opt(opt_prob, sens=self.sens_func, storeHistory='hist.hst')
        print(sol)
Example #15
    def __call__(self, optimizer, options=None):
        """ Run optimization """
        system = self._system
        variables = self._variables

        opt_prob = OptProblem('Optimization', self.obj_func)
        for dv_name in variables['dv'].keys():
            dv = variables['dv'][dv_name]
            dv_id = dv['ID']
            if dv['value'] is not None:
                value = dv['value']
            else:
                value = system.vec['u'](dv_id)
            scale = dv['scale']
            lower = dv['lower']
            upper = dv['upper']
            size = system.vec['u'](dv_id).shape[0]
            opt_prob.addVarGroup(dv_name, size, value=value, scale=scale,
                                 lower=lower, upper=upper)
        opt_prob.finalizeDesignVariables()
        for func_name in variables['func'].keys():
            func = variables['func'][func_name]
            func_id = func['ID']
            lower = func['lower']
            upper = func['upper']
            linear = func['linear']
            get_jacs = func['get_jacs']
            sys = func['sys']
            size = system.vec['u'](func_id).shape[0]
            if lower is None and upper is None:
                opt_prob.addObj(func_name)
            else:
                if get_jacs is not None:
                    jacs_var = get_jacs()

                    dv_names = []
                    jacs = {}
                    for dv_var in jacs_var:
                        dv_id = self._system.get_id(dv_var)
                        dv_name = self._get_name(dv_id)
                        dv_names.append(dv_name)
                        jacs[dv_name] = jacs_var[dv_var]

                    opt_prob.addConGroup(func_name, size,
                                         wrt=dv_names,
                                         jac=jacs, linear=linear,
                                         lower=lower, upper=upper)
                elif sys is not None:
                    dv_names = []
                    for dv_name in variables['dv'].keys():
                        dv_id = variables['dv'][dv_name]['ID']
                        if dv_id in sys.vec['u']:
                            dv_names.append(dv_name)
                    opt_prob.addConGroup(func_name, size,
                                         wrt=dv_names,
                                         lower=lower, upper=upper)
                else:
                    opt_prob.addConGroup(func_name, size,
                                         lower=lower, upper=upper)

        if options is None:
            options = {}

        opt = Optimizer(optimizer, options=options)
        opt.setOption('Iterations limit', int(1e6))
        #opt.setOption('Verify level', 3)
        sol = opt(opt_prob, sens=self.sens_func, storeHistory='hist.hst')
        print(sol)

        try:
            exit_status = sol.optInform['value']
            self.exit_flag = 1
            if exit_status > 2: # bad
                self.exit_flag = 0
        except KeyError: #nothing is here, so something bad happened!
            self.exit_flag = 0
Example #16
if sens == 'none':
    sens = None
if sens == 'user':
    sens = sensfunc
if sens == 'matrix-free':
    sens = [objgrad, jprod, jtprod]

# Instantiate Optimization Problem
optProb = Optimization('Rosenbrock function', objfunc)
optProb.addVarGroup('xvars',
                    2,
                    'c',
                    value=[3, -3],
                    lower=-5.12,
                    upper=5.12,
                    scale=[1.0, 1.0])
optProb.finalizeDesignVariables()
if constrained:
    optProb.addCon('con', upper=0, scale=1.0)
optProb.addObj('obj')

# Create optimizer
opt = OPT(args.opt, options=optOptions)
if testHist == 'no':
    # Just run a normal run
    sol = opt(optProb, sens=sens, sensMode=sensMode)
    # print(sol.fStar)
    print(sol)
else:
    # First call just does 10 iterations
    if args.opt.lower() == 'snopt':
        opt.setOption('Major iterations limit', 10)
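
A hedged sketch of the objfunc and sensfunc callbacks this Rosenbrock driver script assumes (neither is shown here). It covers only the unconstrained 2-variable case and follows the (funcs, fail) return convention used by the other examples on this page; the constrained branch and the matrix-free callbacks (objgrad, jprod, jtprod) are omitted.

def objfunc(xdict):
    x = xdict["xvars"]
    funcs = {}
    funcs["obj"] = 100.0 * (x[1] - x[0] ** 2) ** 2 + (1.0 - x[0]) ** 2
    fail = False
    return funcs, fail

def sensfunc(xdict, funcs):
    x = xdict["xvars"]
    funcsSens = {"obj": {"xvars": [
        -400.0 * x[0] * (x[1] - x[0] ** 2) - 2.0 * (1.0 - x[0]),
        200.0 * (x[1] - x[0] ** 2),
    ]}}
    fail = False
    return funcsSens, fail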
Example #17
    def run(self, problem):
        """pyOpt execution. Note that pyOpt controls the execution, and the
        individual optimizers (e.g., SNOPT) control the iteration.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """

        self.pyopt_solution = None
        rel = problem.root._probdata.relevance

        # Metadata Setup
        self.metadata = create_local_meta(None, self.options['optimizer'])
        self.iter_count = 0
        update_local_meta(self.metadata, (self.iter_count, ))

        # Initial Run
        with problem.root._dircontext:
            problem.root.solve_nonlinear(metadata=self.metadata)

        opt_prob = Optimization(self.options['title'], self._objfunc)

        # Add all parameters
        param_meta = self.get_desvar_metadata()
        self.indep_list = indep_list = list(param_meta)
        param_vals = self.get_desvars()

        for name, meta in iteritems(param_meta):
            opt_prob.addVarGroup(name,
                                 meta['size'],
                                 type='c',
                                 value=param_vals[name],
                                 lower=meta['lower'],
                                 upper=meta['upper'])

        opt_prob.finalizeDesignVariables()

        # Figure out parameter subsparsity for paramcomp index connections.
        # sub_param_conns is empty unless there are some index conns.
        # full_param_conns gets filled with the connections to the entire
        # parameter so that those params can be filtered out of the sparse
        # set if the full path is also relevant
        sub_param_conns = {}
        full_param_conns = {}
        for name in indep_list:
            pathname = problem.root.unknowns.metadata(name)['pathname']
            sub_param_conns[name] = {}
            full_param_conns[name] = set()
            for target, info in iteritems(problem.root.connections):
                src, indices = info
                if src == pathname:
                    if indices is not None:
                        # Need to map the connection indices onto the desvar
                        # indices if both are declared.
                        dv_idx = param_meta[name].get('indices')
                        indices = set(indices)
                        if dv_idx is not None:
                            indices.intersection_update(dv_idx)
                            ldv_idx = list(dv_idx)
                            mapped_idx = [
                                ldv_idx.index(item) for item in indices
                            ]
                            sub_param_conns[name][target] = mapped_idx
                        else:
                            sub_param_conns[name][target] = indices
                    else:
                        full_param_conns[name].add(target)

        # Add all objectives
        objs = self.get_objectives()
        self.quantities = list(objs)
        self.sparsity = OrderedDict()
        self.sub_sparsity = OrderedDict()
        for name in objs:
            opt_prob.addObj(name)
            self.sparsity[name] = self.indep_list

        # Calculate and save gradient for any linear constraints.
        lcons = self.get_constraints(lintype='linear').keys()
        self._problem = problem
        if len(lcons) > 0:
            self.lin_jacs = self.calc_gradient(indep_list,
                                               lcons,
                                               return_format='dict')
            #print("Linear Gradient")
            #print(self.lin_jacs)

        # Add all equality constraints
        econs = self.get_constraints(ctype='eq', lintype='nonlinear')
        con_meta = self.get_constraint_metadata()
        self.quantities += list(econs)

        self.active_tols = {}
        for name in self.get_constraints(ctype='eq'):
            meta = con_meta[name]
            size = meta['size']
            lower = upper = meta['equals']

            # Sparsify Jacobian via relevance
            rels = rel.relevant[name]
            wrt = rels.intersection(indep_list)
            self.sparsity[name] = wrt

            if meta['linear']:
                opt_prob.addConGroup(name,
                                     size,
                                     lower=lower,
                                     upper=upper,
                                     linear=True,
                                     wrt=wrt,
                                     jac=self.lin_jacs[name])
            else:

                jac = self._build_sparse(name, wrt, size, param_vals,
                                         sub_param_conns, full_param_conns,
                                         rels)
                opt_prob.addConGroup(name,
                                     size,
                                     lower=lower,
                                     upper=upper,
                                     wrt=wrt,
                                     jac=jac)

            active_tol = meta.get('active_tol')
            if active_tol:
                self.active_tols[name] = active_tol

        # Add all inequality constraints
        incons = self.get_constraints(ctype='ineq', lintype='nonlinear')
        self.quantities += list(incons)

        for name in self.get_constraints(ctype='ineq'):
            meta = con_meta[name]
            size = meta['size']

            # Bounds - double sided is supported
            lower = meta['lower']
            upper = meta['upper']

            # Sparsify Jacobian via relevance
            rels = rel.relevant[name]
            wrt = rels.intersection(indep_list)
            self.sparsity[name] = wrt

            if meta['linear']:
                opt_prob.addConGroup(name,
                                     size,
                                     upper=upper,
                                     lower=lower,
                                     linear=True,
                                     wrt=wrt,
                                     jac=self.lin_jacs[name])
            else:

                jac = self._build_sparse(name, wrt, size, param_vals,
                                         sub_param_conns, full_param_conns,
                                         rels)
                opt_prob.addConGroup(name,
                                     size,
                                     upper=upper,
                                     lower=lower,
                                     wrt=wrt,
                                     jac=jac)

            active_tol = meta.get('active_tol')
            if active_tol is not None:
                self.active_tols[name] = active_tol

        # Instantiate the requested optimizer
        optimizer = self.options['optimizer']
        try:
            _tmp = __import__('pyoptsparse', globals(), locals(), [optimizer],
                              0)
            opt = getattr(_tmp, optimizer)()
        except ImportError:
            msg = "Optimizer %s is not available in this installation." % \
                   optimizer
            raise ImportError(msg)

        #Set optimization options
        for option, value in self.opt_settings.items():
            opt.setOption(option, value)

        self.opt_prob = opt_prob

        # Execute the optimization problem
        if self.options['gradient method'] == 'pyopt_fd':

            # Use pyOpt's internal finite difference
            fd_step = problem.root.deriv_options['step_size']
            sol = opt(opt_prob,
                      sens='FD',
                      sensStep=fd_step,
                      storeHistory=self.hist_file,
                      hotStart=self.hotstart_file)

        elif self.options['gradient method'] == 'snopt_fd':
            if self.options['optimizer'] == 'SNOPT':

                # Use SNOPT's internal finite difference
                fd_step = problem.root.deriv_options['step_size']
                sol = opt(opt_prob,
                          sens=None,
                          sensStep=fd_step,
                          storeHistory=self.hist_file,
                          hotStart=self.hotstart_file)

            else:
                msg = "SNOPT's internal finite difference can only be used with SNOPT"
                raise Exception(msg)
        else:

            # Use OpenMDAO's differentiator for the gradient
            sol = opt(opt_prob,
                      sens=self._gradfunc,
                      storeHistory=self.hist_file,
                      hotStart=self.hotstart_file)

        self._problem = None

        # Print results
        if self.options['print_results']:
            print(sol)

        # Pull optimal parameters back into framework and re-run, so that
        # framework is left in the right final state
        dv_dict = sol.getDVs()
        for name in indep_list:
            val = dv_dict[name]
            self.set_desvar(name, val)

        with self.root._dircontext:
            self.root.solve_nonlinear(metadata=self.metadata)

        # Save the most recent solution.
        self.pyopt_solution = sol
        try:
            exit_status = sol.optInform['value']
            self.exit_flag = 1
            if exit_status > 2:  # bad
                self.exit_flag = 0
        except KeyError:  #nothing is here, so something bad happened!
            self.exit_flag = 0
Example #18
    def run(self):
        """
        Execute pyOptsparse.

        Note that pyOpt controls the execution, and the individual optimizers
        (e.g., SNOPT) control the iteration.

        Returns
        -------
        boolean
            Failure flag; True if failed to converge, False if successful.
        """
        problem = self._problem
        model = problem.model
        relevant = model._relevant
        self.pyopt_solution = None
        self._total_jac = None
        self.iter_count = 0
        fwd = problem._mode == 'fwd'
        optimizer = self.options['optimizer']

        # Only need initial run if we have linear constraints or if we are using an optimizer that
        # doesn't perform one initially.
        con_meta = self._cons
        model_ran = False
        if optimizer in run_required or np.any([con['linear'] for con in itervalues(self._cons)]):
            with RecordingDebugging(self._get_name(), self.iter_count, self) as rec:
                # Initial Run
                model.run_solve_nonlinear()
                rec.abs = 0.0
                rec.rel = 0.0
                model_ran = True
            self.iter_count += 1

        # compute dynamic simul deriv coloring or just sparsity if option is set
        if coloring_mod._use_sparsity:
            if self.options['dynamic_simul_derivs']:
                coloring_mod.dynamic_simul_coloring(self, run_model=not model_ran,
                                                    do_sparsity=True)
            elif self.options['dynamic_derivs_sparsity']:
                coloring_mod.dynamic_sparsity(self)

        opt_prob = Optimization(self.options['title'], self._objfunc)

        # Add all design variables
        param_meta = self._designvars
        self._indep_list = indep_list = list(param_meta)
        param_vals = self.get_design_var_values()

        for name, meta in iteritems(param_meta):
            opt_prob.addVarGroup(name, meta['size'], type='c',
                                 value=param_vals[name],
                                 lower=meta['lower'], upper=meta['upper'])

        opt_prob.finalizeDesignVariables()

        # Add all objectives
        objs = self.get_objective_values()
        for name in objs:
            opt_prob.addObj(name)
            self._quantities.append(name)

        # Calculate and save derivatives for any linear constraints.
        lcons = [key for (key, con) in iteritems(con_meta) if con['linear']]
        if len(lcons) > 0:
            _lin_jacs = self._compute_totals(of=lcons, wrt=indep_list, return_format='dict')
            # convert all of our linear constraint jacs to COO format. Otherwise pyoptsparse will
            # do it for us and we'll end up with a fully dense COO matrix and very slow evaluation
            # of linear constraints!
            to_remove = []
            for jacdct in itervalues(_lin_jacs):
                for n, subjac in iteritems(jacdct):
                    if isinstance(subjac, np.ndarray):
                        # we can safely use coo_matrix to automatically convert the ndarray
                        # since our linear constraint jacs are constant, so zeros won't become
                        # nonzero during the optimization.
                        mat = coo_matrix(subjac)
                        if mat.row.size > 0:
                            # convert to 'coo' format here to avoid an emphatic warning
                            # by pyoptsparse.
                            jacdct[n] = {'coo': [mat.row, mat.col, mat.data], 'shape': mat.shape}

        # Add all equality constraints
        for name, meta in iteritems(con_meta):
            if meta['equals'] is None:
                continue
            size = meta['size']
            lower = upper = meta['equals']
            if fwd:
                wrt = [v for v in indep_list if name in relevant[v]]
            else:
                rels = relevant[name]
                wrt = [v for v in indep_list if v in rels]

            if meta['linear']:
                jac = {w: _lin_jacs[name][w] for w in wrt}
                opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                     linear=True, wrt=wrt, jac=jac)
            else:
                if name in self._res_jacs:
                    resjac = self._res_jacs[name]
                    jac = {n: resjac[n] for n in wrt}
                else:
                    jac = None
                opt_prob.addConGroup(name, size, lower=lower, upper=upper, wrt=wrt, jac=jac)
                self._quantities.append(name)

        # Add all inequality constraints
        for name, meta in iteritems(con_meta):
            if meta['equals'] is not None:
                continue
            size = meta['size']

            # Bounds - double sided is supported
            lower = meta['lower']
            upper = meta['upper']

            if fwd:
                wrt = [v for v in indep_list if name in relevant[v]]
            else:
                rels = relevant[name]
                wrt = [v for v in indep_list if v in rels]

            if meta['linear']:
                jac = {w: _lin_jacs[name][w] for w in wrt}
                opt_prob.addConGroup(name, size, upper=upper, lower=lower,
                                     linear=True, wrt=wrt, jac=jac)
            else:
                if name in self._res_jacs:
                    resjac = self._res_jacs[name]
                    jac = {n: resjac[n] for n in wrt}
                else:
                    jac = None
                opt_prob.addConGroup(name, size, upper=upper, lower=lower, wrt=wrt, jac=jac)
                self._quantities.append(name)

        # Instantiate the requested optimizer
        try:
            _tmp = __import__('pyoptsparse', globals(), locals(), [optimizer], 0)
            opt = getattr(_tmp, optimizer)()

        except Exception as err:
            # Change whatever pyopt gives us to an ImportError, give it a readable message,
            # but raise with the original traceback.
            msg = "Optimizer %s is not available in this installation." % optimizer
            reraise(ImportError, ImportError(msg), sys.exc_info()[2])

        # Set optimization options
        for option, value in self.opt_settings.items():
            opt.setOption(option, value)

        # Execute the optimization problem
        if self.options['gradient method'] == 'pyopt_fd':

            # Use pyOpt's internal finite difference
            # TODO: Need to get this from OpenMDAO
            # fd_step = problem.root.deriv_options['step_size']
            fd_step = 1e-6
            sol = opt(opt_prob, sens='FD', sensStep=fd_step, storeHistory=self.hist_file,
                      hotStart=self.hotstart_file)

        elif self.options['gradient method'] == 'snopt_fd':
            if self.options['optimizer'] == 'SNOPT':

                # Use SNOPT's internal finite difference
                # TODO: Need to get this from OpenMDAO
                # fd_step = problem.root.deriv_options['step_size']
                fd_step = 1e-6
                sol = opt(opt_prob, sens=None, sensStep=fd_step, storeHistory=self.hist_file,
                          hotStart=self.hotstart_file)

            else:
                msg = "SNOPT's internal finite difference can only be used with SNOPT"
                raise Exception(msg)
        else:

            # Use OpenMDAO's differentiator for the gradient
            sol = opt(opt_prob, sens=self._gradfunc, storeHistory=self.hist_file,
                      hotStart=self.hotstart_file)

        # Print results
        if self.options['print_results']:
            print(sol)

        # Pull optimal parameters back into framework and re-run, so that
        # framework is left in the right final state
        dv_dict = sol.getDVs()
        for name in indep_list:
            self.set_design_var(name, dv_dict[name])

        with RecordingDebugging(self._get_name(), self.iter_count, self) as rec:
            model.run_solve_nonlinear()
            rec.abs = 0.0
            rec.rel = 0.0
        self.iter_count += 1

        # Save the most recent solution.
        self.pyopt_solution = sol
        try:
            exit_status = sol.optInform['value']
            self.fail = False

            # These are various failed statuses.
            if exit_status > 2:
                self.fail = True

        except KeyError:
            # optimizers other than pySNOPT may not populate this dict
            pass

        return self.fail
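
For reference, the COO sub-Jacobian format built in the linear-constraint block above can be reproduced standalone. The {'coo': [rows, cols, data], 'shape': ...} layout is taken directly from the code; the sample matrix is purely illustrative.

import numpy as np
from scipy.sparse import coo_matrix

# A constant dense block for one linear constraint, e.g. d(con)/d(dv).
subjac = np.array([[1.0, 0.0, 0.0],
                   [0.0, 0.0, 2.0]])

mat = coo_matrix(subjac)  # keeps only the two nonzero entries
coo_jac = {'coo': [mat.row, mat.col, mat.data], 'shape': mat.shape}

# Passing coo_jac inside the jac= dict given to addConGroup keeps the
# linear-constraint Jacobian sparse instead of densifying it each iteration.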
Example #19
    def run(self):
        """
        Execute pyOptsparse.

        Note that pyOpt controls the execution, and the individual optimizers
        (e.g., SNOPT) control the iteration.

        Returns
        -------
        boolean
            Failure flag; True if failed to converge, False if successful.
        """
        problem = self._problem()
        model = problem.model
        relevant = model._relevant
        self.pyopt_solution = None
        self._total_jac = None
        self.iter_count = 0
        fwd = problem._mode == 'fwd'
        optimizer = self.options['optimizer']
        self._quantities = []

        self._check_for_missing_objective()

        # Only need initial run if we have linear constraints or if we are using an optimizer that
        # doesn't perform one initially.
        con_meta = self._cons
        model_ran = False
        if optimizer in run_required or np.any(
            [con['linear'] for con in self._cons.values()]):
            with RecordingDebugging(self._get_name(), self.iter_count,
                                    self) as rec:
                # Initial Run
                model.run_solve_nonlinear()
                rec.abs = 0.0
                rec.rel = 0.0
                model_ran = True
            self.iter_count += 1

        # compute dynamic simul deriv coloring or just sparsity if option is set
        if c_mod._use_total_sparsity:
            coloring = None
            if self._coloring_info['coloring'] is None and self._coloring_info[
                    'dynamic']:
                coloring = c_mod.dynamic_total_coloring(
                    self,
                    run_model=not model_ran,
                    fname=self._get_total_coloring_fname())

            if coloring is not None:
                # if the improvement wasn't large enough, don't use coloring
                pct = coloring._solves_info()[-1]
                info = self._coloring_info
                if info['min_improve_pct'] > pct:
                    info['coloring'] = info['static'] = None
                    simple_warning(
                        "%s: Coloring was deactivated.  Improvement of %.1f%% was less "
                        "than min allowed (%.1f%%)." %
                        (self.msginfo, pct, info['min_improve_pct']))

        comm = None if isinstance(problem.comm, FakeComm) else problem.comm
        opt_prob = Optimization(self.options['title'],
                                weak_method_wrapper(self, '_objfunc'),
                                comm=comm)

        # Add all design variables
        param_meta = self._designvars
        self._indep_list = indep_list = list(param_meta)
        param_vals = self.get_design_var_values()

        for name, meta in param_meta.items():
            opt_prob.addVarGroup(name,
                                 meta['size'],
                                 type='c',
                                 value=param_vals[name],
                                 lower=meta['lower'],
                                 upper=meta['upper'])

        opt_prob.finalizeDesignVariables()

        # Add all objectives
        objs = self.get_objective_values()
        for name in objs:
            opt_prob.addObj(name)
            self._quantities.append(name)

        # Calculate and save derivatives for any linear constraints.
        lcons = [key for (key, con) in con_meta.items() if con['linear']]
        if len(lcons) > 0:
            _lin_jacs = self._compute_totals(of=lcons,
                                             wrt=indep_list,
                                             return_format='dict')
            # convert all of our linear constraint jacs to COO format. Otherwise pyoptsparse will
            # do it for us and we'll end up with a fully dense COO matrix and very slow evaluation
            # of linear constraints!
            to_remove = []
            for jacdct in _lin_jacs.values():
                for n, subjac in jacdct.items():
                    if isinstance(subjac, np.ndarray):
                        # we can safely use coo_matrix to automatically convert the ndarray
                        # since our linear constraint jacs are constant, so zeros won't become
                        # nonzero during the optimization.
                        mat = coo_matrix(subjac)
                        if mat.row.size > 0:
                            # convert to 'coo' format here to avoid an emphatic warning
                            # by pyoptsparse.
                            jacdct[n] = {
                                'coo': [mat.row, mat.col, mat.data],
                                'shape': mat.shape
                            }

        # Add all equality constraints
        for name, meta in con_meta.items():
            if meta['equals'] is None:
                continue
            size = meta['size']
            lower = upper = meta['equals']
            if fwd:
                wrt = [v for v in indep_list if name in relevant[v]]
            else:
                rels = relevant[name]
                wrt = [v for v in indep_list if v in rels]

            if meta['linear']:
                jac = {w: _lin_jacs[name][w] for w in wrt}
                opt_prob.addConGroup(name,
                                     size,
                                     lower=lower,
                                     upper=upper,
                                     linear=True,
                                     wrt=wrt,
                                     jac=jac)
            else:
                if name in self._res_jacs:
                    resjac = self._res_jacs[name]
                    jac = {n: resjac[n] for n in wrt}
                else:
                    jac = None
                opt_prob.addConGroup(name,
                                     size,
                                     lower=lower,
                                     upper=upper,
                                     wrt=wrt,
                                     jac=jac)
                self._quantities.append(name)

        # Add all inequality constraints
        for name, meta in con_meta.items():
            if meta['equals'] is not None:
                continue
            size = meta['size']

            # Bounds - double sided is supported
            lower = meta['lower']
            upper = meta['upper']

            if fwd:
                wrt = [v for v in indep_list if name in relevant[v]]
            else:
                rels = relevant[name]
                wrt = [v for v in indep_list if v in rels]

            if meta['linear']:
                jac = {w: _lin_jacs[name][w] for w in wrt}
                opt_prob.addConGroup(name,
                                     size,
                                     upper=upper,
                                     lower=lower,
                                     linear=True,
                                     wrt=wrt,
                                     jac=jac)
            else:
                if name in self._res_jacs:
                    resjac = self._res_jacs[name]
                    jac = {n: resjac[n] for n in wrt}
                else:
                    jac = None
                opt_prob.addConGroup(name,
                                     size,
                                     upper=upper,
                                     lower=lower,
                                     wrt=wrt,
                                     jac=jac)
                self._quantities.append(name)

        # Instantiate the requested optimizer
        try:
            _tmp = __import__('pyoptsparse', globals(), locals(), [optimizer],
                              0)
            opt = getattr(_tmp, optimizer)()

        except Exception as err:
            # Change whatever pyopt gives us to an ImportError with a readable
            # message, chaining the original exception so its traceback is
            # preserved.
            msg = "Optimizer %s is not available in this installation." % optimizer
            raise ImportError(msg) from err

        # Process any default optimizer-specific settings.
        if optimizer in DEFAULT_OPT_SETTINGS:
            for name, value in DEFAULT_OPT_SETTINGS[optimizer].items():
                if name not in self.opt_settings:
                    self.opt_settings[name] = value
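        # e.g. a hypothetical DEFAULT_OPT_SETTINGS = {'IPOPT': {'print_level': 0}}
        # would fill in 'print_level' only if the user has not set it;
        # explicit opt_settings always take precedence. (Illustrative value;
        # the real table is defined elsewhere in the driver.)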

        # Set optimization options
        for option, value in self.opt_settings.items():
            opt.setOption(option, value)

        # Execute the optimization problem
        if self.options['gradient method'] == 'pyopt_fd':

            # Use pyOpt's internal finite difference
            # TODO: Need to get this from OpenMDAO
            # fd_step = problem.model.deriv_options['step_size']
            fd_step = 1e-6
            sol = opt(opt_prob,
                      sens='FD',
                      sensStep=fd_step,
                      storeHistory=self.hist_file,
                      hotStart=self.hotstart_file)

        elif self.options['gradient method'] == 'snopt_fd':
            if self.options['optimizer'] == 'SNOPT':

                # Use SNOPT's internal finite difference
                # TODO: Need to get this from OpenMDAO
                # fd_step = problem.model.deriv_options['step_size']
                fd_step = 1e-6
                sol = opt(opt_prob,
                          sens=None,
                          sensStep=fd_step,
                          storeHistory=self.hist_file,
                          hotStart=self.hotstart_file)

            else:
                raise Exception(
                    "SNOPT's internal finite difference can only be used with SNOPT"
                )
        else:

            # Use OpenMDAO's differentiator for the gradient
            sol = opt(opt_prob,
                      sens=weak_method_wrapper(self, '_gradfunc'),
                      storeHistory=self.hist_file,
                      hotStart=self.hotstart_file)

        # Print results
        if self.options['print_results']:
            print(sol)

        # Pull optimal parameters back into framework and re-run, so that
        # framework is left in the right final state
        dv_dict = sol.getDVs()
        for name in indep_list:
            self.set_design_var(name, dv_dict[name])

        with RecordingDebugging(self._get_name(), self.iter_count,
                                self) as rec:
            model.run_solve_nonlinear()
            rec.abs = 0.0
            rec.rel = 0.0
        self.iter_count += 1

        # Save the most recent solution.
        self.pyopt_solution = sol

        try:
            exit_status = sol.optInform['value']
            self.fail = False

            # These are various failed statuses.
            if optimizer == 'IPOPT':
                if exit_status not in {0, 1}:
                    self.fail = True
            elif exit_status > 2:
                self.fail = True

        except KeyError:
            # optimizers other than pySNOPT may not populate this dict
            pass

        # revert signal handler to cached version
        sigusr = self.options['user_teriminate_signal']
        if sigusr is not None:
            signal.signal(sigusr, self._signal_cache)
            self._signal_cache = None  # to prevent memory leak test from failing

        return self.fail
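The signal-handler revert at the end of the method above pairs with an install step that runs before the optimization starts. A minimal standalone sketch of that cache-and-restore pattern, assuming SIGUSR1 and hypothetical names (not the driver's API):

import signal

def install_user_terminate(handler, sig=signal.SIGUSR1):
    # signal.signal returns the handler that was previously installed;
    # caching that return value is what makes the revert step possible.
    previous = signal.signal(sig, handler)
    return previous

# ... run the optimization ...
# signal.signal(sig, previous)  # revert, as done above with self._signal_cache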
Example 20
    def run(self):
        """
        Execute pyOptSparse.

        Note that pyOpt controls the execution, and the individual optimizers
        (e.g., SNOPT) control the iteration.

        Returns
        -------
        boolean
            Failure flag; True if failed to converge, False if successful.
        """
        problem = self._problem
        model = problem.model
        relevant = model._relevant
        self.pyopt_solution = None
        self._total_jac = None
        self.iter_count = 0
        fwd = problem._mode == 'fwd'
        optimizer = self.options['optimizer']

        # Only need initial run if we have linear constraints or if we are using an optimizer that
        # doesn't perform one initially.
        con_meta = self._cons
        if optimizer in run_required or np.any([con['linear'] for con in itervalues(self._cons)]):
            with RecordingDebugging(optimizer, self.iter_count, self) as rec:
                # Initial Run
                model._solve_nonlinear()
                rec.abs = 0.0
                rec.rel = 0.0
            self.iter_count += 1

        # compute dynamic simul deriv coloring or just sparsity if option is set
        if coloring_mod._use_sparsity:
            if self.options['dynamic_simul_derivs']:
                coloring_mod.dynamic_simul_coloring(self, do_sparsity=True)
            elif self.options['dynamic_derivs_sparsity']:
                coloring_mod.dynamic_sparsity(self)

        opt_prob = Optimization(self.options['title'], self._objfunc)

        # Add all design variables
        param_meta = self._designvars
        self._indep_list = indep_list = list(param_meta)
        param_vals = self.get_design_var_values()

        for name, meta in iteritems(param_meta):
            opt_prob.addVarGroup(name, meta['size'], type='c',
                                 value=param_vals[name],
                                 lower=meta['lower'], upper=meta['upper'])

        opt_prob.finalizeDesignVariables()

        # Add all objectives
        objs = self.get_objective_values()
        for name in objs:
            opt_prob.addObj(name)
            self._quantities.append(name)

        # Calculate and save derivatives for any linear constraints.
        lcons = [key for (key, con) in iteritems(con_meta) if con['linear']]
        if len(lcons) > 0:
            _lin_jacs = self._compute_totals(of=lcons, wrt=indep_list, return_format='dict')
            # convert all of our linear constraint jacs to COO format. Otherwise pyoptsparse will
            # do it for us and we'll end up with a fully dense COO matrix and very slow evaluation
            # of linear constraints!
            for oname, jacdct in iteritems(_lin_jacs):
                for n, subjac in iteritems(jacdct):
                    if isinstance(subjac, np.ndarray):
                        # we can safely use coo_matrix to automatically convert the ndarray
                        # since our linear constraint jacs are constant, so zeros won't become
                        # nonzero during the optimization.
                        mat = coo_matrix(subjac)
                        if mat.row.size > 0:
                            # convert to 'coo' format here to avoid an emphatic warning
                            # by pyoptsparse.
                            jacdct[n] = {'coo': [mat.row, mat.col, mat.data], 'shape': mat.shape}

        # Add all equality constraints
        for name, meta in iteritems(con_meta):
            if meta['equals'] is None:
                continue
            size = meta['size']
            lower = upper = meta['equals']
            if fwd:
                wrt = [v for v in indep_list if name in relevant[v]]
            else:
                rels = relevant[name]
                wrt = [v for v in indep_list if v in rels]

            if meta['linear']:
                jac = {w: _lin_jacs[name][w] for w in wrt}
                opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                     linear=True, wrt=wrt, jac=jac)
            else:
                if name in self._res_jacs:
                    resjac = self._res_jacs[name]
                    jac = {n: resjac[n] for n in wrt}
                else:
                    jac = None
                opt_prob.addConGroup(name, size, lower=lower, upper=upper, wrt=wrt, jac=jac)
                self._quantities.append(name)

        # Add all inequality constraints
        for name, meta in iteritems(con_meta):
            if meta['equals'] is not None:
                continue
            size = meta['size']

            # Bounds - double sided is supported
            lower = meta['lower']
            upper = meta['upper']

            if fwd:
                wrt = [v for v in indep_list if name in relevant[v]]
            else:
                rels = relevant[name]
                wrt = [v for v in indep_list if v in rels]

            if meta['linear']:
                jac = {w: _lin_jacs[name][w] for w in wrt}
                opt_prob.addConGroup(name, size, upper=upper, lower=lower,
                                     linear=True, wrt=wrt, jac=jac)
            else:
                if name in self._res_jacs:
                    resjac = self._res_jacs[name]
                    jac = {n: resjac[n] for n in wrt}
                else:
                    jac = None
                opt_prob.addConGroup(name, size, upper=upper, lower=lower, wrt=wrt, jac=jac)
                self._quantities.append(name)

        # Instantiate the requested optimizer
        try:
            _tmp = __import__('pyoptsparse', globals(), locals(), [optimizer], 0)
            opt = getattr(_tmp, optimizer)()

        except Exception as err:
            # Change whatever pyopt gives us to an ImportError, give it a readable message,
            # but raise with the original traceback.
            msg = "Optimizer %s is not available in this installation." % optimizer
            reraise(ImportError, ImportError(msg), sys.exc_info()[2])

        # Set optimization options
        for option, value in self.opt_settings.items():
            opt.setOption(option, value)

        # Execute the optimization problem
        if self.options['gradient method'] == 'pyopt_fd':

            # Use pyOpt's internal finite difference
            # TODO: Need to get this from OpenMDAO
            # fd_step = problem.root.deriv_options['step_size']
            fd_step = 1e-6
            sol = opt(opt_prob, sens='FD', sensStep=fd_step, storeHistory=self.hist_file,
                      hotStart=self.hotstart_file)

        elif self.options['gradient method'] == 'snopt_fd':
            if self.options['optimizer'] == 'SNOPT':

                # Use SNOPT's internal finite difference
                # TODO: Need to get this from OpenMDAO
                # fd_step = problem.root.deriv_options['step_size']
                fd_step = 1e-6
                sol = opt(opt_prob, sens=None, sensStep=fd_step, storeHistory=self.hist_file,
                          hotStart=self.hotstart_file)

            else:
                msg = "SNOPT's internal finite difference can only be used with SNOPT"
                raise Exception(msg)
        else:

            # Use OpenMDAO's differentiator for the gradient
            sol = opt(opt_prob, sens=self._gradfunc, storeHistory=self.hist_file,
                      hotStart=self.hotstart_file)

        # Print results
        if self.options['print_results']:
            print(sol)

        # Pull optimal parameters back into framework and re-run, so that
        # framework is left in the right final state
        dv_dict = sol.getDVs()
        for name in indep_list:
            val = dv_dict[name]
            self.set_design_var(name, val)

        with RecordingDebugging(self.options['optimizer'], self.iter_count, self) as rec:
            model._solve_nonlinear()
            rec.abs = 0.0
            rec.rel = 0.0
        self.iter_count += 1

        # Save the most recent solution.
        self.pyopt_solution = sol
        try:
            exit_status = sol.optInform['value']
            self.fail = False

            # These are various failed statuses.
            if exit_status > 2:
                self.fail = True

        except KeyError:
            # optimizers other than pySNOPT may not populate this dict
            pass

        return self.fail
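Both run() variants above reduce sol.optInform to a boolean failure flag, with the first variant adding an IPOPT special case. A condensed sketch of that mapping (function name hypothetical):

def opt_failed(opt_inform, optimizer):
    # pyoptsparse populates optInform as {'value': <int>, 'text': <str>}
    # for optimizers that report an inform code (e.g. pySNOPT).
    if 'value' not in opt_inform:
        return False  # no code reported; mirrors the KeyError pass-through
    value = opt_inform['value']
    if optimizer == 'IPOPT':
        return value not in (0, 1)
    return value > 2  # SNOPT-style codes: anything above 2 is a failure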
    def run(self, problem):
        """pyOpt execution. Note that pyOpt controls the execution, and the
        individual optimizers (i.e., SNOPT) control the iteration.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """

        self.pyopt_solution = None
        rel = problem.root._probdata.relevance

        # Metadata Setup
        self.metadata = create_local_meta(None, self.options['optimizer'])
        self.iter_count = 0
        update_local_meta(self.metadata, (self.iter_count,))

        # Initial Run
        with problem.root._dircontext:
            problem.root.solve_nonlinear(metadata=self.metadata)

        opt_prob = Optimization(self.options['title'], self._objfunc)

        # Add all parameters
        param_meta = self.get_desvar_metadata()
        self.indep_list = indep_list = list(param_meta)
        param_vals = self.get_desvars()

        for name, meta in iteritems(param_meta):
            opt_prob.addVarGroup(name, meta['size'], type='c',
                                 value=param_vals[name],
                                 lower=meta['lower'], upper=meta['upper'])

        opt_prob.finalizeDesignVariables()

        # Figure out parameter subsparsity for paramcomp index connections.
        # sub_param_conns is empty unless there are some index conns.
        # full_param_conns gets filled with the connections to the entire
        # parameter so that those params can be filtered out of the sparse
        # set if the full path is also relevant
        sub_param_conns = {}
        full_param_conns = {}
        for name in indep_list:
            pathname = problem.root.unknowns.metadata(name)['pathname']
            sub_param_conns[name] = {}
            full_param_conns[name] = set()
            for target, info in iteritems(problem.root.connections):
                src, indices = info
                if src == pathname:
                    if indices is not None:
                        # Need to map the connection indices onto the desvar
                        # indices if both are declared.
                        dv_idx = param_meta[name].get('indices')
                        indices = set(indices)
                        if dv_idx is not None:
                            indices.intersection_update(dv_idx)
                            ldv_idx = list(dv_idx)
                            mapped_idx = [ldv_idx.index(item) for item in indices]
                            sub_param_conns[name][target] = mapped_idx
                        else:
                            sub_param_conns[name][target] = indices
                    else:
                        full_param_conns[name].add(target)
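        # Hedged worked example of the mapping above (values hypothetical):
        # with dv_idx = (2, 5, 9) and connection indices {5, 9, 11}, the
        # intersection is {5, 9} and mapped_idx == [1, 2] -- positions within
        # the desvar's declared indices rather than absolute source indices.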

        # Add all objectives
        objs = self.get_objectives()
        self.quantities = list(objs)
        self.sparsity = OrderedDict()
        self.sub_sparsity = OrderedDict()
        for name in objs:
            opt_prob.addObj(name)
            self.sparsity[name] = self.indep_list

        # Calculate and save gradient for any linear constraints.
        lcons = self.get_constraints(lintype='linear').keys()
        if len(lcons) > 0:
            self.lin_jacs = problem.calc_gradient(indep_list, lcons,
                                                  return_format='dict')
            #print("Linear Gradient")
            #print(self.lin_jacs)

        # Add all equality constraints
        econs = self.get_constraints(ctype='eq', lintype='nonlinear')
        con_meta = self.get_constraint_metadata()
        self.quantities += list(econs)

        for name in self.get_constraints(ctype='eq'):
            meta = con_meta[name]
            size = meta['size']
            lower = upper = meta['equals']

            # Sparsify Jacobian via relevance
            rels = rel.relevant[name]
            wrt = rels.intersection(indep_list)
            self.sparsity[name] = wrt

            if meta['linear']:
                opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                     linear=True, wrt=wrt,
                                     jac=self.lin_jacs[name])
            else:

                jac = self._build_sparse(name, wrt, size, param_vals,
                                         sub_param_conns, full_param_conns, rels)
                opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                     wrt=wrt, jac=jac)

        # Add all inequality constraints
        incons = self.get_constraints(ctype='ineq', lintype='nonlinear')
        self.quantities += list(incons)

        for name in self.get_constraints(ctype='ineq'):
            meta = con_meta[name]
            size = meta['size']

            # Bounds - double sided is supported
            lower = meta['lower']
            upper = meta['upper']

            # Sparsify Jacobian via relevance
            rels = rel.relevant[name]
            wrt = rels.intersection(indep_list)
            self.sparsity[name] = wrt

            if meta['linear']:
                opt_prob.addConGroup(name, size, upper=upper, lower=lower,
                                     linear=True, wrt=wrt,
                                     jac=self.lin_jacs[name])
            else:

                jac = self._build_sparse(name, wrt, size, param_vals,
                                         sub_param_conns, full_param_conns, rels)
                opt_prob.addConGroup(name, size, upper=upper, lower=lower,
                                     wrt=wrt, jac=jac)

        # Instantiate the requested optimizer
        optimizer = self.options['optimizer']
        try:
            _tmp = __import__('pyoptsparse', globals(), locals(), [optimizer], 0)
            opt = getattr(_tmp, optimizer)()
        except (ImportError, AttributeError):
            msg = "Optimizer %s is not available in this installation." % \
                   optimizer
            raise ImportError(msg)

        # Set optimization options
        for option, value in self.opt_settings.items():
            opt.setOption(option, value)

        self._problem = problem

        # Execute the optimization problem
        if self.options['pyopt_diff']:
            # Use pyOpt's internal finite difference
            fd_step = problem.root.fd_options['step_size']
            sol = opt(opt_prob, sens='FD', sensStep=fd_step, storeHistory=self.hist_file)
        else:
            # Use OpenMDAO's differentiator for the gradient
            sol = opt(opt_prob, sens=self._gradfunc, storeHistory=self.hist_file)

        self._problem = None

        # Print results
        if self.options['print_results']:
            print(sol)

        # Pull optimal parameters back into framework and re-run, so that
        # framework is left in the right final state
        dv_dict = sol.getDVs()
        for name in indep_list:
            val = dv_dict[name]
            self.set_desvar(name, val)

        with self.root._dircontext:
            self.root.solve_nonlinear(metadata=self.metadata)

        # Save the most recent solution.
        self.pyopt_solution = sol
        try:
            exit_status = sol.optInform['value']
            self.exit_flag = 1
            if exit_status > 2:  # bad
                self.exit_flag = 0
        except KeyError:  # nothing is here, so something bad happened!
            self.exit_flag = 0
Example 22
    def run(self):
        """
        Execute pyOptSparse.

        Note that pyOpt controls the execution, and the individual optimizers
        (e.g., SNOPT) control the iteration.

        Returns
        -------
        boolean
            Failure flag; True if failed to converge, False if successful.
        """
        problem = self._problem
        model = problem.model
        relevant = model._relevant
        self.pyopt_solution = None
        self.iter_count = 0
        fwd = problem._mode == 'fwd'

        # Metadata Setup
        self.metadata = create_local_meta(self.options['optimizer'])

        with Recording(self.options['optimizer'], self.iter_count,
                       self) as rec:
            # Initial Run
            model._solve_nonlinear()
            rec.abs = 0.0
            rec.rel = 0.0
        self.iter_count += 1

        opt_prob = Optimization(self.options['title'], self._objfunc)

        # Add all design variables
        param_meta = self._designvars
        self._indep_list = indep_list = list(param_meta)
        param_vals = self.get_design_var_values()

        for name, meta in iteritems(param_meta):
            opt_prob.addVarGroup(name,
                                 meta['size'],
                                 type='c',
                                 value=param_vals[name],
                                 lower=meta['lower'],
                                 upper=meta['upper'])

        opt_prob.finalizeDesignVariables()

        # Add all objectives
        objs = self.get_objective_values()
        for name in objs:
            opt_prob.addObj(name)
            self._quantities.append(name)

        # Calculate and save derivatives for any linear constraints.
        con_meta = self._cons
        lcons = [key for (key, con) in iteritems(con_meta) if con['linear']]
        if len(lcons) > 0:
            _lin_jacs = problem._compute_totals(of=lcons,
                                                wrt=indep_list,
                                                return_format='dict')

        # Add all equality constraints
        self.active_tols = {}
        eqcons = OrderedDict((key, con) for (key, con) in iteritems(con_meta)
                             if con['equals'] is not None)
        for name, meta in iteritems(eqcons):
            size = meta['size']
            lower = upper = meta['equals']
            if fwd:
                wrt = [v for v in indep_list if name in relevant[v]]
            else:
                rels = relevant[name]
                wrt = [v for v in indep_list if v in rels]

            if meta['linear']:
                opt_prob.addConGroup(name,
                                     size,
                                     lower=lower,
                                     upper=upper,
                                     linear=True,
                                     wrt=wrt,
                                     jac=_lin_jacs[name])
            else:
                opt_prob.addConGroup(name,
                                     size,
                                     lower=lower,
                                     upper=upper,
                                     wrt=wrt)
                self._quantities.append(name)

        # Add all inequality constraints
        iqcons = OrderedDict((key, con) for (key, con) in iteritems(con_meta)
                             if con['equals'] is None)
        for name, meta in iteritems(iqcons):
            size = meta['size']

            # Bounds - double sided is supported
            lower = meta['lower']
            upper = meta['upper']

            if fwd:
                wrt = [v for v in indep_list if name in relevant[v]]
            else:
                rels = relevant[name]
                wrt = [v for v in indep_list if v in rels]

            if meta['linear']:
                opt_prob.addConGroup(name,
                                     size,
                                     upper=upper,
                                     lower=lower,
                                     linear=True,
                                     wrt=wrt,
                                     jac=_lin_jacs[name])
            else:
                opt_prob.addConGroup(name,
                                     size,
                                     upper=upper,
                                     lower=lower,
                                     wrt=wrt)
                self._quantities.append(name)

        # Instantiate the requested optimizer
        optimizer = self.options['optimizer']
        try:
            _tmp = __import__('pyoptsparse', globals(), locals(), [optimizer],
                              0)
            opt = getattr(_tmp, optimizer)()

        except ImportError:
            msg = "Optimizer %s is not available in this installation." % optimizer
            raise ImportError(msg)

        # Set optimization options
        for option, value in self.opt_settings.items():
            opt.setOption(option, value)

        self.opt_prob = opt_prob
        self.opt = opt

        # Execute the optimization problem
        if self.options['gradient method'] == 'pyopt_fd':

            # Use pyOpt's internal finite difference
            # TODO: Need to get this from OpenMDAO
            # fd_step = problem.root.deriv_options['step_size']
            fd_step = 1e-6
            sol = opt(opt_prob,
                      sens='FD',
                      sensStep=fd_step,
                      storeHistory=self.hist_file,
                      hotStart=self.hotstart_file)

        elif self.options['gradient method'] == 'snopt_fd':
            if self.options['optimizer'] == 'SNOPT':

                # Use SNOPT's internal finite difference
                # TODO: Need to get this from OpenMDAO
                # fd_step = problem.root.deriv_options['step_size']
                fd_step = 1e-6
                sol = opt(opt_prob,
                          sens=None,
                          sensStep=fd_step,
                          storeHistory=self.hist_file,
                          hotStart=self.hotstart_file)

            else:
                msg = "SNOPT's internal finite difference can only be used with SNOPT"
                raise Exception(msg)
        else:

            # Use OpenMDAO's differentiator for the gradient
            sol = opt(opt_prob,
                      sens=self._gradfunc,
                      storeHistory=self.hist_file,
                      hotStart=self.hotstart_file)

        # Print results
        if self.options['print_results']:
            print(sol)

        # Pull optimal parameters back into framework and re-run, so that
        # framework is left in the right final state
        dv_dict = sol.getDVs()
        for name in indep_list:
            val = dv_dict[name]
            self.set_design_var(name, val)

        with Recording(self.options['optimizer'], self.iter_count,
                       self) as rec:
            model._solve_nonlinear()
            rec.abs = 0.0
            rec.rel = 0.0
        self.iter_count += 1

        # Save the most recent solution.
        self.pyopt_solution = sol
        try:
            exit_status = sol.optInform['value']
            self.fail = False

            # These are various failed statuses.
            if exit_status > 2:
                self.fail = True

        except KeyError:
            # Nothing is here, so something bad happened!
            self.fail = True

        return self.fail
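The three-way dispatch on options['gradient method'] is repeated nearly verbatim in each run() above. A condensed sketch of the rule it implements (helper name hypothetical, not OpenMDAO API):

def pick_sens(gradient_method, optimizer, gradfunc):
    if gradient_method == 'pyopt_fd':
        return 'FD'  # pyOpt's internal finite difference
    if gradient_method == 'snopt_fd':
        if optimizer != 'SNOPT':
            raise Exception("SNOPT's internal finite difference can only "
                            "be used with SNOPT")
        return None  # sens=None defers differencing to SNOPT itself
    return gradfunc  # the framework-supplied gradient callback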