Example 1
    def current_solution(self, curr_design, curr_state, curr_adj,
                         curr_dual, num_iter):
        """
        Kona will evaluate this method at every outer optimization iteration.
        It can be used to print out useful information to monitor the process,
        or to save design points of the intermediate iterations.

        The current design vector, current state vector and current adjoint
        vector have been made available to the user via the arguments.

        Parameters
        ----------
        curr_design : BaseVector
            Current design point.
        curr_state : BaseVector
            Current state variables.
        curr_adj : BaseVector
            Current adjoint variables for the objective.
        curr_dual : BaseVector
            Current dual vector in storage. (This might be unnecessary!)
        num_iter : int
            Current outer iteration number.
        """
        self.curr_design = curr_design.data
        self.num_iter = num_iter
        self.iter_count += 1

        self._push_primal_nl(curr_design)
        self._push_state_nl(curr_state)

        update_local_meta(self.metadata, (self.iter_count,))
        self.recorders.record_iteration(self.system, self.metadata)
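Every example in this collection follows the same bookkeeping pattern: bump an iteration counter, stamp it into the local metadata with update_local_meta, execute the model, then hand the metadata to the recorders. A minimal sketch of that pattern in isolation, assuming OpenMDAO 1.x (where these helpers live in openmdao.util.record_util); the record() stub is hypothetical and stands in for self.recorders.record_iteration:

    from openmdao.util.record_util import create_local_meta, update_local_meta

    def record(metadata):
        # stand-in for self.recorders.record_iteration(system, metadata)
        print(metadata['coord'])

    iter_count = 0
    metadata = create_local_meta(None, 'Driver')    # fresh coordinate for this driver
    for _ in range(3):                              # one pass per model execution
        iter_count += 1
        update_local_meta(metadata, (iter_count,))  # stamp the iteration number
        record(metadata)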
Example 2
    def run(self, problem):
        """Build a runlist and execute the Problem for each set of generated
        parameters.
        """
        self.iter_count = 0

        if MPI and self._num_par_doe > 1:
            if self._load_balance:
                runlist = self._distrib_lb_build_runlist()
                if self._full_comm.rank == 0:
                    try:
                        next(runlist)
                    except StopIteration:
                        pass
                    return  # we're done sending cases
            else:
                runlist = self._distrib_build_runlist()
        else:
            runlist = self._build_runlist()

        # For each runlist entry, run the system and record the results
        for run in runlist:
            for dv_name, dv_val in run:
                self.set_desvar(dv_name, dv_val)

            metadata = create_local_meta(None, 'Driver')

            update_local_meta(metadata, (self.iter_count, ))
            problem.root.solve_nonlinear(metadata=metadata)
            self.recorders.record_iteration(problem.root, metadata)
            self.iter_count += 1
Example 3
    def run(self, problem):
        """Build a runlist and execute the Problem for each set of generated
        parameters.
        """
        self.iter_count = 0

        if MPI and self._num_par_doe > 1:
            if self._load_balance:
                runlist = self._distrib_lb_build_runlist()
                if self._full_comm.rank == 0:
                    try:
                        next(runlist)
                    except StopIteration:
                        pass
                    return # we're done sending cases
            else:
                runlist = self._distrib_build_runlist()
        else:
            runlist = self._build_runlist()

        # For each runlist entry, run the system and record the results
        for run in runlist:
            for dv_name, dv_val in run:
                self.set_desvar(dv_name, dv_val)

            metadata = create_local_meta(None, 'Driver')

            update_local_meta(metadata, (self.iter_count,))
            problem.root.solve_nonlinear(metadata=metadata)
            self.recorders.record_iteration(problem.root, metadata)
            self.iter_count += 1
Example 4
    def solve(self, params, unknowns, resids, system, metadata=None):
        """ Executes each item in the system hierarchy sequentially.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        system : `System`
            Parent `System` object.
        """

        self.iter_count += 1
        # Metadata setup
        local_meta = create_local_meta(metadata, system.name)
        system.ln_solver.local_meta = local_meta
        update_local_meta(local_meta, (self.iter_count, ))

        system.children_solve_nonlinear(local_meta)
        self.recorders.record_iteration(system, local_meta)
Example 5
    def solve(self, params, unknowns, resids, system, metadata=None):
        """ Executes each item in the system hierarchy sequentially.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        system : `System`
            Parent `System` object.
        """

        self.iter_count += 1
        # Metadata setup
        local_meta = create_local_meta(metadata, system.name)
        system.ln_solver.local_meta = local_meta
        update_local_meta(local_meta, (self.iter_count,))

        system.children_solve_nonlinear(local_meta)

        for recorder in self.recorders:
            recorder.raw_record(system.params, system.unknowns, system.resids, local_meta)
Example 6
 def _prep_case(self, case):
     """Create metadata for the case and set design variables.
     """
     metadata = create_local_meta(None, 'Driver')
     update_local_meta(metadata, (self.iter_count, ))
     for dv_name, dv_val in case:
         self.set_desvar(dv_name, dv_val)
     return metadata
Example 7
 def _prep_case(self, case, iter_count):
     """Create metadata for the case and set design variables.
     """
     metadata = create_local_meta(None, 'Driver')
     update_local_meta(metadata, (iter_count,))
     for dv_name, dv_val in case:
         self.set_desvar(dv_name, dv_val)
     return metadata
Example 8
    def test_format_coord(self):
        name = 'Sub'
        coord = (1, 2, 3)

        meta = create_local_meta(self.meta, name)
        update_local_meta(meta, coord)

        s = format_iteration_coordinate(meta['coord'])

        self.assertEqual(s, '/0/Sub/1-2-3')
Example 9
    def run(self, problem):
        self.problem = problem

        optns = self.options['algorithm_options']
        algorithm = self.options['algorithm']

        self.iter_count = 0
        self.metadata = create_local_meta(None, 'Kona')
        update_local_meta(self.metadata, (self.iter_count,))

        optimizer = kona.Optimizer(self, algorithm, optns)
        optimizer.solve()
Example 10
    def _eval(self, x, params, unknowns, resids):
        """Callback function for evaluating f(x)"""
        
        self.iter_count += 1
        update_local_meta(self.local_meta, (self.iter_count,))

        unknowns[self.s_var_name] = x
        self.sys.children_solve_nonlinear(self.local_meta)
        self.sys.apply_nonlinear(params, unknowns, resids)

        self.recorders.record_iteration(self.sys, self.local_meta)

        return resids[self.s_var_name]
Example 11
    def _eval(self, x, params, unknowns, resids):
        """Callback function for evaluating f(x)"""

        self.iter_count += 1
        update_local_meta(self.local_meta, (self.iter_count, ))

        unknowns[self.s_var_name] = x
        self.sys.children_solve_nonlinear(self.local_meta)
        self.sys.apply_nonlinear(params, unknowns, resids)

        self.recorders.record_iteration(self.sys, self.local_meta)

        return resids[self.s_var_name]
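The _eval callbacks in Examples 10 and 11 return a scalar residual for a single state variable, which is exactly the shape a bracketing root finder expects. A toy sketch of that contract, assuming SciPy's brentq and a made-up residual (all of the solve/record plumbing above is omitted):

    from scipy.optimize import brentq

    def _eval(x):
        # toy residual standing in for children_solve_nonlinear + apply_nonlinear
        return x**2 - 2.0

    root = brentq(_eval, 0.0, 2.0)  # the bracket [0, 2] must change sign
    print(root)                     # ~1.41421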
Example 12
    def _objfunc(self, x_new):
        """ Function that evaluates and returns the objective function. Model
        is executed here.

        Args
        ----
        x_new : ndarray
            Array containing parameter values at new design point.

        Returns
        -------
        float
            Value of the objective function evaluated at the new design point.
        """

        system = self.root
        metadata = self.metadata

        # Pass in new parameters
        i = 0
        for name, meta in self.get_desvar_metadata().items():
            size = meta['size']
            self.set_desvar(name, x_new[i:i + size])
            i += size

        self.iter_count += 1
        update_local_meta(metadata, (self.iter_count, ))

        with system._dircontext:
            system.solve_nonlinear(metadata=metadata)

        # Get the objective function evaluations
        for name, obj in self.get_objectives().items():
            f_new = obj
            break

        self.con_cache = self.get_constraints()

        # Record after getting obj and constraints to assure it has been
        # gathered in MPI.
        self.recorders.record_iteration(system, metadata)

        #print("Functions calculated")
        #print(x_new)
        #print(f_new)

        return f_new
Example 13
    def _objfunc(self, x_new):
        """ Function that evaluates and returns the objective function. Model
        is executed here.

        Args
        ----
        x_new : ndarray
            Array containing parameter values at new design point.

        Returns
        -------
        float
            Value of the objective function evaluated at the new design point.
        """

        system = self.root
        metadata = self.metadata

        # Pass in new parameters
        i = 0
        for name, meta in self.get_desvar_metadata().items():
            size = meta['size']
            self.set_desvar(name, x_new[i:i+size])
            i += size

        self.iter_count += 1
        update_local_meta(metadata, (self.iter_count,))

        with system._dircontext:
            system.solve_nonlinear(metadata=metadata)

        # Get the objective function evaluations
        for name, obj in self.get_objectives().items():
            f_new = obj
            break

        self.con_cache = self.get_constraints()

        # Record after getting obj and constraints to assure it has been
        # gathered in MPI.
        self.recorders.record_iteration(system, metadata)

        #print("Functions calculated")
        #print(x_new)
        #print(f_new)

        return f_new
Example 14
    def run_one(self, problem, run):
        for dv_name, dv_val in run:
            self.set_desvar(dv_name, dv_val)

        metadata = create_local_meta(None, 'Driver')

        update_local_meta(metadata, (self.iter_count, ))

        try:
            problem.root.solve_nonlinear(metadata=metadata)
        except AnalysisError:
            metadata['msg'] = traceback.format_exc()
            metadata['success'] = 0
        self.recorders.record_iteration(problem.root, metadata)
        self.iter_count += 1
        if self.use_restart:
            self.restart.record_iteration()
Example 15
    def run_one(self, problem, run):
        for dv_name, dv_val in run:
            self.set_desvar(dv_name, dv_val)

        metadata = create_local_meta(None, 'Driver')

        update_local_meta(metadata, (self.iter_count,))

        try:
            problem.root.solve_nonlinear(metadata=metadata)
        except AnalysisError:
            metadata['msg'] = traceback.format_exc()
            metadata['success'] = 0
        self.recorders.record_iteration(problem.root, metadata)
        self.iter_count += 1
        if self.use_restart:
            self.restart.record_iteration()
Example 16
    def objfunc(self, x_new):
        """ Function that evaluates and returns the objective function. Model
        is executed here.

        Args
        ----
        x_new : ndarray
            Array containing parameter values at new design point.

        Returns
        -------
        float
            Value of the objective function evaluated at the new design point.
        """

        system = self.root
        metadata = self.metadata

        # Pass in new parameters
        i = 0
        for name, meta in self.get_param_metadata().items():
            size = meta['size']
            self.set_param(name, x_new[i:i+size])
            i += size

        self.iter_count += 1
        update_local_meta(metadata, (self.iter_count,))

        system.solve_nonlinear(metadata=metadata)
        for recorder in self.recorders:
            recorder.raw_record(system.params, system.unknowns,
                                system.resids, metadata)

        # Get the objective function evaluations
        for name, obj in self.get_objectives().items():
            f_new = obj
            break

        #print("Functions calculated")
        #print(x_new)
        #print(f_new)

        return f_new
Example 17
    def _eval(self, x, params, unknowns, resids):
        """Callback function for evaluating f(x)"""

        idx = self.options["state_var_idx"]
        self.iter_count += 1
        update_local_meta(self.local_meta, (self.iter_count,))

        unknowns._dat[self.s_var_name].val[idx] = x

        self.sys.children_solve_nonlinear(self.local_meta)
        self.sys.apply_nonlinear(params, unknowns, resids)

        self.recorders.record_iteration(self.sys, self.local_meta)

        if self.options["iprint"] > 0:
            normval = abs(resids._dat[self.s_var_name].val[idx])
            self.print_norm(self.print_name, self.sys.pathname, self.iter_count, normval, self.basenorm)

        return resids._dat[self.s_var_name].val[idx]
Example 18
    def objfunc(self, x_new):
        """ Function that evaluates and returns the objective function. Model
        is executed here.

        Args
        ----
        x_new : ndarray
            Array containing parameter values at new design point.

        Returns
        -------
        float
            Value of the objective function evaluated at the new design point.
        """

        system = self.root
        metadata = self.metadata

        # Pass in new parameters
        i = 0
        for name, meta in self.get_param_metadata().items():
            size = meta['size']
            self.set_param(name, x_new[i:i + size])
            i += size

        self.iter_count += 1
        update_local_meta(metadata, (self.iter_count, ))

        system.solve_nonlinear(metadata=metadata)
        for recorder in self.recorders:
            recorder.raw_record(system.params, system.unknowns, system.resids,
                                metadata)

        # Get the objective function evaluations
        for name, obj in self.get_objectives().items():
            f_new = obj
            break

        #print("Functions calculated")
        #print(x_new)
        #print(f_new)

        return f_new
Example 19
    def run(self, problem):
        """ Runs the driver. This function should be overriden when inheriting.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """
        system = problem.root

        # Metadata Setup
        self.iter_count += 1
        metadata = create_local_meta(None, 'Driver')
        system.ln_solver.local_meta = metadata
        update_local_meta(metadata, (self.iter_count,))

        # Solve the system once and record results.
        system.solve_nonlinear(metadata=metadata)

        self.recorders.record_iteration(system, metadata)
Example 20
    def run(self, problem):
        """ Runs the driver. This function should be overriden when inheriting.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """
        system = problem.root

        # Metadata Setup
        self.iter_count += 1
        metadata = create_local_meta(None, 'Driver')
        system.ln_solver.local_meta = metadata
        update_local_meta(metadata, (self.iter_count,))

        # Solve the system once and record results.
        system.solve_nonlinear(metadata=metadata)

        self.recorders.record_iteration(system, metadata)
Example 21
    def _eval(self, x, params, unknowns, resids):
        """Callback function for evaluating f(x)"""

        idx = self.options['state_var_idx']
        self.iter_count += 1
        update_local_meta(self.local_meta, (self.iter_count, ))

        unknowns._dat[self.s_var_name].val[idx] = x

        self.sys.children_solve_nonlinear(self.local_meta)
        self.sys.apply_nonlinear(params, unknowns, resids)

        self.recorders.record_iteration(self.sys, self.local_meta)

        if self.options['iprint'] > 0:
            normval = abs(resids._dat[self.s_var_name].val[idx])
            self.print_norm(self.print_name, self.sys.pathname,
                            self.iter_count, normval, self.basenorm)

        return resids._dat[self.s_var_name].val[idx]
Example 22
    def run(self, problem):
        """ Runs the driver. This function should be overriden when inheriting.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """
        system = problem.root

        # Metadata Setup
        self.iter_count += 1
        metadata = create_local_meta(None, "Driver")
        system.ln_solver.local_meta = metadata
        update_local_meta(metadata, (self.iter_count,))

        # Solve the system once and record results.
        system.solve_nonlinear(metadata=metadata)
        for recorder in self.recorders:
            recorder.raw_record(system.params, system.unknowns, system.resids, metadata)
Example 23
    def run_once(self, problem):
        """ Runs root's solve_nonlinear one time

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """
        system = problem.root

        # Metadata Setup
        self.iter_count += 1
        metadata = self.metadata = create_local_meta(None, 'Driver')
        system.ln_solver.local_meta = metadata
        update_local_meta(metadata, (self.iter_count,))

        # Solve the system once and record results.
        with system._dircontext:
            system.solve_nonlinear(metadata=metadata)

        self.recorders.record_iteration(system, metadata)
Example 24
    def run(self, problem):
        """ Save away scaled info."""

        self._problem = problem
        self.metadata = create_local_meta(None, 'test')
        self.iter_count = 0
        update_local_meta(self.metadata, (self.iter_count,))

        params = self.get_desvars()
        param_meta = self.get_desvar_metadata()

        self.set_desvar('x', np.array([22.0, 404.0, 9009.0, 121000.0]))
        problem.root.solve_nonlinear()

        objective = self.get_objectives()
        constraint = self.get_constraints()

        # Stuff we saved should be in the scaled coordinates.
        self.param = params['x']
        self.obj_scaled = objective['y']
        self.con_scaled = constraint['con']
        self.param_low = param_meta['x']['lower']
Example 25
    def run(self, problem):
        """Build a runlist and execute the Problem for each set of generated
        parameters.
        """
        self.iter_count = 0

        if MPI and self._num_par_doe > 1:
            runlist = self._distrib_build_runlist()
        else:
            runlist = self._build_runlist()

        # For each runlist entry, run the system and record the results
        for run in runlist:
            for dv_name, dv_val in run:
                self.set_desvar(dv_name, dv_val)

            metadata = create_local_meta(None, 'Driver')

            update_local_meta(metadata, (self.iter_count,))
            problem.root.solve_nonlinear(metadata=metadata)
            self.recorders.record_iteration(problem.root, metadata)
            self.iter_count += 1
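A runlist entry is an iterable of (design-variable name, value) pairs, one entry per case. A hedged sketch of what a full-factorial _build_runlist might yield, using two hypothetical design variables (the real generators also handle MPI distribution and load balancing):

    import itertools

    def build_runlist():
        # full-factorial cases over two made-up design variables
        for x, y in itertools.product([0.0, 1.0], [10.0, 20.0]):
            yield [('x', x), ('y', y)]

    for case in build_runlist():
        for dv_name, dv_val in case:
            print(dv_name, dv_val)  # the driver would call self.set_desvar here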
Example 26
    def run(self, problem):
        """Build a runlist and execute the Problem for each set of generated
        parameters.
        """
        self.iter_count = 0

        if MPI and self._num_par_doe > 1:
            runlist = self._distrib_build_runlist()
        else:
            runlist = self._build_runlist()

        # For each runlist entry, run the system and record the results
        for run in runlist:
            for dv_name, dv_val in run:
                self.set_desvar(dv_name, dv_val)

            metadata = create_local_meta(None, 'Driver')

            update_local_meta(metadata, (self.iter_count,))
            problem.root.solve_nonlinear(metadata=metadata)
            self.recorders.record_iteration(problem.root, metadata)
            self.iter_count += 1
Example 27
    def run(self, problem):
        """ Save away scaled info."""

        self._problem = problem
        self.metadata = create_local_meta(None, 'test')
        self.iter_count = 0
        update_local_meta(self.metadata, (self.iter_count,))

        params = self.get_desvars()
        param_meta = self.get_desvar_metadata()

        self.set_desvar('x', np.array([22.0, 404.0, 9009.0, 121000.0]))
        problem.root.solve_nonlinear()

        objective = self.get_objectives()
        constraint = self.get_constraints()

        # Stuff we saved should be in the scaled coordinates.
        self.param = params['x']
        self.obj_scaled = objective['y']
        self.con_scaled = constraint['con']
        self.param_low = param_meta['x']['lower']
Example 28
    def run(self, problem):
        """Build a runlist and execute the Problem for each set of generated
        parameters.
        """
        self.iter_count = 0

        if MPI and self._num_par_doe > 1:
            if self._load_balance:
                runlist = self._distrib_lb_build_runlist()
            else:
                runlist = self._get_case_w_nones(self._distrib_build_runlist())
        else:
            runlist = self._build_runlist()

        with problem.root._dircontext:
            # For each runlist entry, run the system and record the results
            for case in runlist:

                if MPI and self._load_balance and self._full_comm.rank == 0:
                    # we're the master rank and case is a completed case
                    self.recorders.record_case(problem.root, case)
                elif case is not None: # dummy cases have case == None
                    metadata = create_local_meta(None, 'Driver')
                    update_local_meta(metadata, (self.iter_count,))
                    for dv_name, dv_val in case:
                        self.set_desvar(dv_name, dv_val)

                    problem.root.solve_nonlinear(metadata=metadata)

                    if self._load_balance:
                        # keep meta for worker to send to master
                        self._last_meta = metadata

                if not MPI or not self._load_balance:
                    self.recorders.record_iteration(problem.root, metadata,
                                                    dummy=(case is None))

                self.iter_count += 1
Example 29
    def run(self, problem):
        """ Save away scaled info."""

        self._problem = problem
        self.metadata = create_local_meta(None, "test")
        self.iter_count = 0
        update_local_meta(self.metadata, (self.iter_count,))

        params = self.get_desvars()
        param_meta = self.get_desvar_metadata()

        self.set_desvar("x", 0.5)
        problem.root.solve_nonlinear()

        objective = self.get_objectives()
        constraint = self.get_constraints()

        # Stuff we saved should be in the scaled coordinates.
        self.param = params["x"]
        self.obj_scaled = objective["f_xy"]
        self.con_scaled = constraint["con"]
        self.param_high = param_meta["x"]["upper"]
        self.param_low = param_meta["x"]["lower"]
Example 30
    def test_integer_coord(self):

        update_local_meta(self.meta, 2)

        self.assertEqual(self.meta['coord'], ['', (2,)])
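Taken together, the two tests (Examples 8 and 30) pin down the metadata shape: 'coord' alternates name segments with iteration tuples, a bare integer passed to update_local_meta is wrapped into a 1-tuple, and format_iteration_coordinate joins segments with '/' and tuple entries with '-'. An illustrative pure-Python reconstruction of just the integer-wrapping behavior; this is an assumption about the internals, not the real implementation:

    def toy_update_local_meta(meta, iteration):
        # mirror the test above: a bare int becomes a 1-tuple
        if not isinstance(iteration, tuple):
            iteration = (iteration,)
        meta['coord'][-1] = iteration

    meta = {'coord': ['', None]}
    toy_update_local_meta(meta, 2)
    assert meta['coord'] == ['', (2,)]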
Example 31
    def solve(self, params, unknowns, resids, system, metadata=None):
        """ Solves the system using a Netwon's Method.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        system : `System`
            Parent `System` object.

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        atol = self.options['atol']
        rtol = self.options['rtol']
        maxiter = self.options['maxiter']
        alpha = self.options['alpha']

        # Metadata setup
        self.iter_count = 0
        local_meta = create_local_meta(metadata, system.pathname)
        system.ln_solver.local_meta = local_meta
        update_local_meta(local_meta, (self.iter_count, 0))

        # Perform an initial run to propagate srcs to targets.
        system.children_solve_nonlinear(local_meta)
        system.apply_nonlinear(params, unknowns, resids)

        f_norm = resids.norm()
        f_norm0 = f_norm

        if self.options['iprint'] > 0:
            self.print_norm(self.print_name, system.pathname, 0, f_norm,
                            f_norm0)

        arg = system.drmat[None]
        result = system.dumat[None]

        while self.iter_count < maxiter and f_norm > atol and \
                f_norm/f_norm0 > rtol:

            # Linearize Model with partial derivatives
            system._sys_linearize(params, unknowns, resids, total_derivs=False)

            # Calculate direction to take step
            arg.vec[:] = resids.vec
            system.solve_linear(system.dumat, system.drmat, [None], mode='fwd')

            # Step in that direction,
            self.iter_count += 1
            f_norm = self.line_search.solve(params, unknowns, resids, system,
                                            self, alpha, f_norm0, metadata)

        # Need to make sure the whole workflow is executed at the final
        # point, not just evaluated.
        #self.iter_count += 1
        #update_local_meta(local_meta, (self.iter_count, 0))
        #system.children_solve_nonlinear(local_meta)

        if self.options['iprint'] > 0:

            if self.iter_count == maxiter or isnan(f_norm):
                msg = 'FAILED to converge after max iterations'
            else:
                msg = 'converged'

            self.print_norm(self.print_name,
                            system.pathname,
                            self.iter_count,
                            f_norm,
                            f_norm0,
                            msg=msg)
Example 32
    def run(self, problem):
        """pyOpt execution. Note that pyOpt controls the execution, and the
        individual optimizers (e.g., SNOPT) control the iteration.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """

        self.pyopt_solution = None
        rel = problem.root._relevance

        # Metadata Setup
        self.metadata = create_local_meta(None, self.options['optimizer'])
        self.iter_count = 0
        update_local_meta(self.metadata, (self.iter_count,))

        # Initial Run
        problem.root.solve_nonlinear(metadata=self.metadata)

        opt_prob = Optimization(self.options['title'], self.objfunc)

        # Add all parameters
        param_meta = self.get_param_metadata()
        param_list = list(iterkeys(param_meta))
        param_vals = self.get_params()
        for name, meta in iteritems(param_meta):
            opt_prob.addVarGroup(name, meta['size'], type='c',
                                 value=param_vals[name],
                                 lower=meta['low'], upper=meta['high'])

        opt_prob.finalizeDesignVariables()

        # Add all objectives
        objs = self.get_objectives()
        self.quantities = list(iterkeys(objs))
        for name in objs:
            opt_prob.addObj(name)

        # Calculate and save gradient for any linear constraints.
        lcons = self.get_constraints(lintype='linear').values()
        if len(lcons) > 0:
            self.lin_jacs = problem.calc_gradient(param_list, lcons,
                                                  return_format='dict')
            #print("Linear Gradient")
            #print(self.lin_jacs)

        # Add all equality constraints
        econs = self.get_constraints(ctype='eq', lintype='nonlinear')
        con_meta = self.get_constraint_metadata()
        self.quantities += list(iterkeys(econs))
        for name in econs:
            size = con_meta[name]['size']
            lower = np.zeros((size))
            upper = np.zeros((size))

            # Sparsify Jacobian via relevance
            wrt = rel.relevant[name].intersection(param_list)

            if con_meta[name]['linear'] is True:
                opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                     linear=True, wrt=wrt,
                                     jac=self.lin_jacs[name])
            else:
                opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                     wrt=wrt)

        # Add all inequality constraints
        incons = self.get_constraints(ctype='ineq', lintype='nonlinear')
        self.quantities += list(iterkeys(incons))
        for name in incons:
            size = con_meta[name]['size']
            upper = np.zeros((size))

            # Sparsify Jacobian via relevance
            wrt = rel.relevant[name].intersection(param_list)

            if con_meta[name]['linear'] is True:
                opt_prob.addConGroup(name, size, upper=upper, linear=True,
                                     wrt=wrt, jac=self.lin_jacs[name])
            else:
                opt_prob.addConGroup(name, size, upper=upper, wrt=wrt)

        # TODO: Support double-sided constraints in openMDAO
        # Add all double_sided constraints
        #for name, con in iteritems(self.get_2sided_constraints()):
            #size = con_meta[name]['size']
            #upper = con.high * np.ones((size))
            #lower = con.low * np.ones((size))
            #name = '%s.out0' % con.pcomp_name
            #if con.linear is True:
                #opt_prob.addConGroup(name,
                #size, upper=upper, lower=lower,
                                     #linear=True, wrt=param_list,
                                     #jac=self.lin_jacs[name])
            #else:
                #opt_prob.addConGroup(name,
                #                     size, upper=upper, lower=lower)

        # Instantiate the requested optimizer
        optimizer = self.options['optimizer']
        try:
            exec('from pyoptsparse import %s' % optimizer)
        except ImportError:
            msg = "Optimizer %s is not available in this installation." % \
                   optimizer
            raise ImportError(msg)

        optname = vars()[optimizer]
        opt = optname()

        #Set optimization options
        for option, value in self.opt_settings.items():
            opt.setOption(option, value)

        self._problem = problem

        # Execute the optimization problem
        if self.options['pyopt_diff'] is True:
            # Use pyOpt's internal finite difference
            fd_step = problem.root.fd_options['step_size']
            sol = opt(opt_prob, sens='FD', sensStep=fd_step)
        else:
            # Use OpenMDAO's differentiator for the gradient
            sol = opt(opt_prob, sens=self.gradfunc)

        self._problem = None

        # Print results
        if self.options['print_results'] is True:
            print(sol)

        # Pull optimal parameters back into framework and re-run, so that
        # framework is left in the right final state
        dv_dict = sol.getDVs()
        for name in self.get_params():
            val = dv_dict[name]
            self.set_param(name, val)

        self.root.solve_nonlinear(metadata=self.metadata)

        # Save the most recent solution.
        self.pyopt_solution = sol
        try:
            exit_status = sol.optInform['value']
            self.exit_flag = 1
            if exit_status > 2: # bad
                self.exit_flag = 0
        except KeyError: #nothing is here, so something bad happened!
            self.exit_flag = 0
Example 33
    def run(self, problem):
        """pyOpt execution. Note that pyOpt controls the execution, and the
        individual optimizers (e.g., SNOPT) control the iteration.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """

        self.pyopt_solution = None
        rel = problem.root._probdata.relevance

        # Metadata Setup
        self.metadata = create_local_meta(None, self.options['optimizer'])
        self.iter_count = 0
        update_local_meta(self.metadata, (self.iter_count,))

        # Initial Run
        with problem.root._dircontext:
            problem.root.solve_nonlinear(metadata=self.metadata)

        opt_prob = Optimization(self.options['title'], self._objfunc)

        # Add all parameters
        param_meta = self.get_desvar_metadata()
        self.indep_list = indep_list = list(param_meta)
        param_vals = self.get_desvars()

        for name, meta in iteritems(param_meta):
            opt_prob.addVarGroup(name, meta['size'], type='c',
                                 value=param_vals[name],
                                 lower=meta['lower'], upper=meta['upper'])

        opt_prob.finalizeDesignVariables()

        # Figure out parameter subsparsity for paramcomp index connections.
        # sub_param_conns is empty unless there are some index conns.
        # full_param_conns gets filled with the connections to the entire
        # parameter so that those params can be filtered out of the sparse
        # set if the full path is also relevant
        sub_param_conns = {}
        full_param_conns = {}
        for name in indep_list:
            pathname = problem.root.unknowns.metadata(name)['pathname']
            sub_param_conns[name] = {}
            full_param_conns[name] = set()
            for target, info in iteritems(problem.root.connections):
                src, indices = info
                if src == pathname:
                    if indices is not None:
                        # Need to map the connection indices onto the desvar
                        # indices if both are declared.
                        dv_idx = param_meta[name].get('indices')
                        indices = set(indices)
                        if dv_idx is not None:
                            indices.intersection_update(dv_idx)
                            ldv_idx = list(dv_idx)
                            mapped_idx = [ldv_idx.index(item) for item in indices]
                            sub_param_conns[name][target] = mapped_idx
                        else:
                            sub_param_conns[name][target] = indices
                    else:
                        full_param_conns[name].add(target)

        # Add all objectives
        objs = self.get_objectives()
        self.quantities = list(objs)
        self.sparsity = OrderedDict()
        self.sub_sparsity = OrderedDict()
        for name in objs:
            opt_prob.addObj(name)
            self.sparsity[name] = self.indep_list

        # Calculate and save gradient for any linear constraints.
        lcons = self.get_constraints(lintype='linear').keys()
        if len(lcons) > 0:
            self.lin_jacs = problem.calc_gradient(indep_list, lcons,
                                                  return_format='dict')
            #print("Linear Gradient")
            #print(self.lin_jacs)

        # Add all equality constraints
        econs = self.get_constraints(ctype='eq', lintype='nonlinear')
        con_meta = self.get_constraint_metadata()
        self.quantities += list(econs)

        for name in self.get_constraints(ctype='eq'):
            meta = con_meta[name]
            size = meta['size']
            lower = upper = meta['equals']

            # Sparsify Jacobian via relevance
            rels = rel.relevant[name]
            wrt = rels.intersection(indep_list)
            self.sparsity[name] = wrt

            if meta['linear']:
                opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                     linear=True, wrt=wrt,
                                     jac=self.lin_jacs[name])
            else:

                jac = self._build_sparse(name, wrt, size, param_vals,
                                         sub_param_conns, full_param_conns, rels)
                opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                     wrt=wrt, jac=jac)

        # Add all inequality constraints
        incons = self.get_constraints(ctype='ineq', lintype='nonlinear')
        self.quantities += list(incons)

        for name in self.get_constraints(ctype='ineq'):
            meta = con_meta[name]
            size = meta['size']

            # Bounds - double sided is supported
            lower = meta['lower']
            upper = meta['upper']

            # Sparsify Jacobian via relevance
            rels = rel.relevant[name]
            wrt = rels.intersection(indep_list)
            self.sparsity[name] = wrt

            if meta['linear']:
                opt_prob.addConGroup(name, size, upper=upper, lower=lower,
                                     linear=True, wrt=wrt,
                                     jac=self.lin_jacs[name])
            else:

                jac = self._build_sparse(name, wrt, size, param_vals,
                                         sub_param_conns, full_param_conns, rels)
                opt_prob.addConGroup(name, size, upper=upper, lower=lower,
                                     wrt=wrt, jac=jac)

        # Instantiate the requested optimizer
        optimizer = self.options['optimizer']
        try:
            exec('from pyoptsparse import %s' % optimizer)
        except ImportError:
            msg = "Optimizer %s is not available in this installation." % \
                   optimizer
            raise ImportError(msg)

        optname = vars()[optimizer]
        opt = optname()

        #Set optimization options
        for option, value in self.opt_settings.items():
            opt.setOption(option, value)

        self._problem = problem

        # Execute the optimization problem
        if self.options['pyopt_diff']:
            # Use pyOpt's internal finite difference
            fd_step = problem.root.fd_options['step_size']
            sol = opt(opt_prob, sens='FD', sensStep=fd_step, storeHistory=self.hist_file)
        else:
            # Use OpenMDAO's differentiator for the gradient
            sol = opt(opt_prob, sens=self._gradfunc, storeHistory=self.hist_file)

        self._problem = None

        # Print results
        if self.options['print_results']:
            print(sol)

        # Pull optimal parameters back into framework and re-run, so that
        # framework is left in the right final state
        dv_dict = sol.getDVs()
        for name in indep_list:
            val = dv_dict[name]
            self.set_desvar(name, val)

        with self.root._dircontext:
            self.root.solve_nonlinear(metadata=self.metadata)

        # Save the most recent solution.
        self.pyopt_solution = sol
        try:
            exit_status = sol.optInform['value']
            self.exit_flag = 1
            if exit_status > 2: # bad
                self.exit_flag = 0
        except KeyError: #nothing is here, so something bad happened!
            self.exit_flag = 0
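Both pyOpt drivers load the requested optimizer with exec and then fish the class out of vars(), which leans on CPython's locals() behavior and is brittle. A more defensive equivalent, offered as a hedged alternative rather than the project's actual code, imports the module once and uses getattr:

    import importlib

    def load_optimizer(name):
        # equivalent of: exec('from pyoptsparse import %s' % name)
        module = importlib.import_module('pyoptsparse')
        try:
            return getattr(module, name)()  # instantiate, e.g. name='SLSQP'
        except AttributeError:
            raise ImportError(
                "Optimizer %s is not available in this installation." % name)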
Example 34
    def solve(self, params, unknowns, resids, system, metadata=None):
        """ Solves the system using a Netwon's Method.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        system : `System`
            Parent `System` object.

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        atol = self.options['atol']
        rtol = self.options['rtol']
        maxiter = self.options['maxiter']
        ls_atol = self.options['ls_atol']
        ls_rtol = self.options['ls_rtol']
        ls_maxiter = self.options['ls_maxiter']
        alpha = self.options['alpha']

        # Metadata setup
        self.iter_count = 0
        ls_itercount = 0
        local_meta = create_local_meta(metadata, system.name)
        system.ln_solver.local_meta = local_meta
        update_local_meta(local_meta, (self.iter_count, ls_itercount))

        # Perform an initial run to propagate srcs to targets.
        if self.options['solve_subsystems'] is True:
            system.children_solve_nonlinear(local_meta)
        system.apply_nonlinear(params, unknowns, resids)

        f_norm = resids.norm()
        f_norm0 = f_norm

        if self.options['iprint'] > 0:
            self.print_norm('NEWTON', local_meta, 0, f_norm, f_norm0)

        arg = system.drmat[None]
        result = system.dumat[None]

        alpha_base = alpha
        while self.iter_count < maxiter and f_norm > atol and \
                f_norm/f_norm0 > rtol:

            # Linearize Model
            system.jacobian(params, unknowns, resids)

            # Calculate direction to take step
            arg.vec[:] = resids.vec[:]
            system.solve_linear(system.dumat, system.drmat, [None], mode='fwd')

            unknowns.vec[:] += alpha * result.vec[:]

            # Metadata update
            self.iter_count += 1
            ls_itercount = 0
            update_local_meta(local_meta, (self.iter_count, ls_itercount))

            # Just evaluate the model with the new points
            if self.options['solve_subsystems'] is True:
                system.children_solve_nonlinear(local_meta)
            system.apply_nonlinear(params, unknowns, resids, local_meta)

            for recorder in self.recorders:
                recorder.raw_record(params, unknowns, resids, local_meta)

            f_norm = resids.norm()
            if self.options['iprint'] > 0:
                self.print_norm('NEWTON', local_meta, self.iter_count, f_norm,
                                f_norm0)

            # Backtracking Line Search
            while ls_itercount < ls_maxiter and \
                    f_norm > ls_atol and \
                    f_norm/f_norm0 > ls_rtol:

                alpha *= 0.5
                unknowns.vec[:] -= alpha * result.vec[:]
                ls_itercount += 1

                # Metadata update
                update_local_meta(local_meta, (self.iter_count, ls_itercount))

                # Just evaluate the model with the new points
                if self.options['solve_subsystems'] is True:
                    system.children_solve_nonlinear(local_meta)
                system.apply_nonlinear(params, unknowns, resids, local_meta)

                for recorder in self.recorders:
                    recorder.raw_record(params, unknowns, resids, local_meta)

                f_norm = resids.norm()
                if self.options['iprint'] > 1:
                    self.print_norm('BK_TKG',
                                    local_meta,
                                    ls_itercount,
                                    f_norm,
                                    f_norm0,
                                    indent=1,
                                    solver='LS')

            # Reset backtracking
            alpha = alpha_base

        # Need to make sure the whole workflow is executed at the final
        # point, not just evaluated.
        #self.iter_count += 1
        #update_local_meta(local_meta, (self.iter_count, 0))
        #system.children_solve_nonlinear(local_meta)

        if self.options['iprint'] > 0:
            self.print_norm('NEWTON',
                            local_meta,
                            self.iter_count,
                            f_norm,
                            f_norm0,
                            msg='Converged')
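Stripped of the metadata, recorders, and framework vectors, Example 34 is a plain Newton iteration with a backtracking line search that halves the net step while the residual has gotten worse. A self-contained NumPy sketch of the same loop skeleton on a toy one-equation system (the tolerances and the halving rule are illustrative):

    import numpy as np

    def residual(u):
        return np.array([u[0]**2 - 4.0])  # toy system; root at u = 2

    def jacobian(u):
        return np.array([[2.0 * u[0]]])

    atol, rtol, maxiter, alpha_base = 1e-10, 1e-12, 50, 1.0
    u = np.array([10.0])
    f_norm = f_norm0 = np.linalg.norm(residual(u))
    iter_count = 0
    while iter_count < maxiter and f_norm > atol and f_norm / f_norm0 > rtol:
        step = np.linalg.solve(jacobian(u), -residual(u))  # Newton direction
        base_norm = f_norm
        alpha = alpha_base
        u += alpha * step
        iter_count += 1
        f_norm = np.linalg.norm(residual(u))
        ls_itercount = 0
        # backtrack: halve the net step while the residual is worse than before
        while ls_itercount < 10 and f_norm > base_norm:
            alpha *= 0.5
            u -= alpha * step  # same "subtract half the step" rule as above
            ls_itercount += 1
            f_norm = np.linalg.norm(residual(u))
    print(u, iter_count)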
Example 35
    def run(self, problem):
        """Optimize the problem using your choice of Scipy optimizer.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """

        # Metadata Setup
        opt = self.options['optimizer']
        self.metadata = create_local_meta(None, opt)
        self.iter_count = 0
        update_local_meta(self.metadata, (self.iter_count, ))

        # Initial Run
        problem.root.solve_nonlinear(metadata=self.metadata)

        pmeta = self.get_param_metadata()
        self.params = list(iterkeys(pmeta))
        self.objs = list(iterkeys(self.get_objectives()))
        con_meta = self.get_constraint_metadata()
        self.cons = list(iterkeys(con_meta))

        self.opt_settings['maxiter'] = self.options['maxiter']
        self.opt_settings['disp'] = self.options['disp']

        # Size Problem
        nparam = 0
        for param in itervalues(pmeta):
            nparam += param['size']
        x_init = np.zeros(nparam)

        # Initial Parameters
        i = 0
        use_bounds = (opt in _bounds_optimizers)
        if use_bounds:
            bounds = []
        else:
            bounds = None

        for name, val in iteritems(self.get_params()):
            size = pmeta[name]['size']
            x_init[i:i + size] = val
            i += size

            # Bounds if our optimizer supports them
            if use_bounds:
                meta_low = pmeta[name]['low']
                meta_high = pmeta[name]['high']
                for j in range(0, size):

                    if isinstance(meta_low, np.ndarray):
                        p_low = meta_low[j]
                    else:
                        p_low = meta_low

                    if isinstance(meta_high, np.ndarray):
                        p_high = meta_high[j]
                    else:
                        p_high = meta_high

                    bounds.append((p_low, p_high))

        # Constraints
        constraints = []
        i = 0
        if opt in _constraint_optimizers:
            for name, meta in con_meta.items():
                size = meta['size']
                for j in range(0, size):
                    con_dict = {}
                    con_dict['type'] = meta['ctype']
                    con_dict['fun'] = self.confunc
                    if opt in _constraint_grad_optimizers:
                        con_dict['jac'] = self.congradfunc
                    con_dict['args'] = [name, j]
                    constraints.append(con_dict)
                self.con_idx[name] = i
                i += size

        # Provide gradients for optimizers that support it
        if opt in _gradient_optimizers:
            jac = self.gradfunc
        else:
            jac = None

        # optimize
        self._problem = problem
        result = minimize(
            self.objfunc,
            x_init,
            #args=(),
            method=opt,
            jac=jac,
            #hess=None,
            #hessp=None,
            bounds=bounds,
            constraints=constraints,
            tol=self.options['tol'],
            #callback=None,
            options=self.opt_settings)

        self._problem = None
        self.result = result

        print('Optimization Complete')
        print('-' * 35)
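Example 35 is ultimately a thin adapter that flattens the design variables into x_init and forwards everything to scipy.optimize.minimize. A self-contained sketch of that call shape on a toy problem, assuming SLSQP (a method that supports both bounds and constraints, matching the optimizer capability lists gated above):

    import numpy as np
    from scipy.optimize import minimize

    def objfunc(x):
        return (x[0] - 3.0)**2 + (x[1] + 1.0)**2  # toy objective

    bounds = [(-5.0, 5.0), (-5.0, 5.0)]
    constraints = [{'type': 'ineq', 'fun': lambda x: 10.0 - x[0] - x[1]}]

    result = minimize(objfunc, np.zeros(2), method='SLSQP',
                      bounds=bounds, constraints=constraints,
                      tol=1e-8, options={'maxiter': 100, 'disp': False})
    print(result.x)  # approximately [3, -1]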
Example 36
    def solve(self, params, unknowns, resids, system, metadata=None):
        """ Solves the system using Gauss Seidel.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        system : `System`
            Parent `System` object.

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        atol = self.options['atol']
        rtol = self.options['rtol']
        utol = self.options['utol']
        maxiter = self.options['maxiter']
        iprint = self.options['iprint']
        unknowns_cache = self.unknowns_cache

        # Initial run
        self.iter_count = 1

        # Metadata setup
        local_meta = create_local_meta(metadata, system.pathname)
        system.ln_solver.local_meta = local_meta
        update_local_meta(local_meta, (self.iter_count,))

        # Initial Solve
        system.children_solve_nonlinear(local_meta)

        self.recorders.record_iteration(system, local_meta)

        # Bail early if the user wants to.
        if maxiter == 1:
            return

        resids = system.resids
        unknowns_cache = np.zeros(unknowns.vec.shape)

        # Evaluate Norm
        system.apply_nonlinear(params, unknowns, resids)
        normval = resids.norm()
        basenorm = normval if normval > atol else 1.0
        u_norm = 1.0e99

        if iprint == 2:
            self.print_norm(self.print_name, system, 1, normval, basenorm)

        while self.iter_count < maxiter and \
                normval > atol and \
                normval/basenorm > rtol  and \
                u_norm > utol:

            # Metadata update
            self.iter_count += 1
            update_local_meta(local_meta, (self.iter_count,))
            unknowns_cache[:] = unknowns.vec

            # Runs an iteration
            system.children_solve_nonlinear(local_meta)
            self.recorders.record_iteration(system, local_meta)

            # Evaluate Norm
            system.apply_nonlinear(params, unknowns, resids)
            normval = resids.norm()
            u_norm = np.linalg.norm(unknowns.vec - unknowns_cache)

            if iprint == 2:
                self.print_norm(self.print_name, system, self.iter_count, normval,
                                basenorm, u_norm=u_norm)

        # Final residual print if you only want the last one
        if iprint == 1:
            self.print_norm(self.print_name, system, self.iter_count, normval,
                            basenorm, u_norm=u_norm)

        if self.iter_count >= maxiter or isnan(normval):
            msg = 'FAILED to converge after %d iterations' % self.iter_count
            fail = True
        else:
            fail = False

        if iprint > 0 or (fail and iprint > -1 ):
            if not fail:
                msg = 'Converged in %d iterations' % self.iter_count

            self.print_norm(self.print_name, system, self.iter_count, normval,
                            basenorm, msg=msg)

        if fail and self.options['err_on_maxiter']:
            raise AnalysisError("Solve in '%s': NLGaussSeidel %s" %
                                (system.pathname, msg))
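Underneath the metadata and recorder calls, the Gauss-Seidel loop above is a fixed-point iteration with three stopping tests: absolute residual, relative residual, and the size of the last update. A self-contained sketch of that skeleton on a toy fixed-point map (run_once stands in for children_solve_nonlinear):

    import numpy as np

    def run_once(u):
        return np.cos(u)  # toy fixed-point map; converges to ~0.739

    atol, rtol, utol, maxiter = 1e-12, 1e-12, 1e-12, 100
    iter_count = 1
    u = run_once(np.array([0.0]))              # initial solve
    normval = np.linalg.norm(run_once(u) - u)  # residual of the fixed point
    basenorm = normval if normval > atol else 1.0
    u_norm = 1.0e99
    while iter_count < maxiter and normval > atol \
            and normval / basenorm > rtol and u_norm > utol:
        iter_count += 1
        u_cache = u.copy()
        u = run_once(u)
        normval = np.linalg.norm(run_once(u) - u)
        u_norm = np.linalg.norm(u - u_cache)
    print(u, iter_count)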
Example 37
    def objfunc(self, dv_dict):
        """ Function that evaluates and returns the objective function and
        constraints. This function is passed to pyOpt's Optimization object
        and is called from its optimizers.

        Args
        ----
        dv_dict : dict
            Dictionary of design variable values.

        Returns
        -------
        func_dict : dict
            Dictionary of all functional variables evaluated at design point.

        fail : int
            0 for successful function evaluation
            1 for unsuccessful function evaluation
        """

        fail = 1
        func_dict = {}
        metadata = self.metadata
        system = self.root
        comm = system.comm
        iproc = comm.rank
        nproc = comm.size

        try:
            for name in self.get_params():
                self.set_param(name, dv_dict[name])

            # Execute the model
            #print("Setting DV")
            #print(dv_dict)

            self.iter_count += 1
            update_local_meta(metadata, (self.iter_count,))

            system.solve_nonlinear(metadata=metadata)
            for recorder in self.recorders:
                recorder.raw_record(system.params, system.unknowns,
                                    system.resids, metadata)

            # Get the objective function evaluations
            for name, obj in iteritems(self.get_objectives()):
                # if nproc > 1:
                #     owner = system._owning_ranks[name]
                #     if iproc == owner:
                #         func_dict[name] = comm.bcast(obj, root=owner)
                #     else:
                #         func_dict[name] = comm.bcast(None, root=owner)
                # else:
                func_dict[name] = obj

            # Get the constraint evaluations
            for name, con in iteritems(self.get_constraints()):
                # if nproc > 1:
                #     owner = system._owning_ranks[name]
                #     if iproc == owner:
                #         func_dict[name] = comm.bcast(con, root=owner)
                #     else:
                #         func_dict[name] = comm.bcast(None, root=owner)
                # else:
                func_dict[name] = con

            # Get the double-sided constraint evaluations
            #for key, con in iteritems(self.get_2sided_constraints()):
            #    func_dict[name] = np.array(con.evaluate(self.parent))

            fail = 0

        except Exception as msg:

            # Exceptions seem to be swallowed by the C code, so this
            # should give the user more info than the dreaded "segfault"
            print("Exception: %s" % str(msg))
            print(70*"=")
            import traceback
            traceback.print_exc()
            print(70*"=")

        #print("Functions calculated")
        #print(func_dict)
        return func_dict, fail
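The pyOpt-style objective (Example 37) must return a (func_dict, fail) pair, keyed by the objective and constraint names registered on the Optimization object, with fail set to 0 on success and 1 on failure. A toy end-to-end sketch of that contract, assuming pyoptsparse with its bundled SLSQP wrapper and finite-difference gradients:

    import numpy as np
    from pyoptsparse import Optimization, SLSQP

    def objfunc(dv_dict):
        funcs = {}
        x = dv_dict['x']
        funcs['obj'] = np.sum((x - 1.0)**2)  # toy objective
        funcs['con'] = x[0] + x[1]           # toy constraint value
        fail = 0                             # 0 = success, 1 = failure
        return funcs, fail

    opt_prob = Optimization('toy problem', objfunc)
    opt_prob.addVarGroup('x', 2, type='c', value=0.0, lower=-10.0, upper=10.0)
    opt_prob.addObj('obj')
    opt_prob.addConGroup('con', 1, upper=1.0)

    sol = SLSQP()(opt_prob, sens='FD')  # pyOpt's internal finite difference
    print(sol.getDVs())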
Example 38
    def _save_case(self, case, meta=None):
        if self._num_par_doe > 1:
            if self._load_balance:
                self.recorders.record_completed_case(self.root, case)
            else:
                self.recorders.record_iteration(self.root,
                                                meta,
                                                dummy=(case is None))
        else:
            self.recorders.record_iteration(self.root, meta)

    def _prep_case(self, case, iter_count):
        """Create metadata for the case and set design variables.
        """
        metadata = create_local_meta(None, 'Driver')
        update_local_meta(metadata, (iter_count, ))
        for dv_name, dv_val in case:
            self.set_desvar(dv_name, dv_val)
        return metadata

    def _try_case(self, root, metadata):
        """Run a case and save exception info and mark the metadata
        if the case fails.
        """

        terminate = False
        exc = None

        metadata['terminate'] = 0

        try:
Example 39
    def solve(self, params, unknowns, resids, system, metadata=None):
        """ Solves the system using a Netwon's Method.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        system : `System`
            Parent `System` object.

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        atol = self.options['atol']
        rtol = self.options['rtol']
        utol = self.options['utol']
        maxiter = self.options['maxiter']
        alpha_scalar = self.options['alpha']
        iprint = self.options['iprint']
        ls = self.line_search
        unknowns_cache = self.unknowns_cache

        # Metadata setup
        self.iter_count = 0
        local_meta = create_local_meta(metadata, system.pathname)
        if self.ln_solver:
            self.ln_solver.local_meta = local_meta
        else:
            system.ln_solver.local_meta = local_meta
        update_local_meta(local_meta, (self.iter_count, 0))

        # Perform an initial run to propagate srcs to targets.
        system.apply_nonlinear(params, unknowns, resids)
        f_norm = resids.norm()

        if f_norm < atol: 
            if iprint == 2:
                self.print_norm(self.print_name, system, self.iter_count,
                                f_norm, 1.0, u_norm=1.0)
            return 

        if self.options['solve_subsystems']:
            system.children_solve_nonlinear(local_meta)
        system.apply_nonlinear(params, unknowns, resids)

        if ls:
            base_u = np.zeros(unknowns.vec.shape)

        f_norm = resids.norm()
        f_norm0 = f_norm

        if iprint == 2:
            self.print_norm(self.print_name, system, 0, f_norm,
                            f_norm0)

        arg = system.drmat[None]
        result = system.dumat[None]
        u_norm = 1.0e99

        # Can't have the system trying to FD itself when it also contains Newton.
        save_type = system.deriv_options['type']
        system.deriv_options.locked = False
        system.deriv_options['type'] = 'user'

        while self.iter_count < maxiter and f_norm > atol and \
                f_norm/f_norm0 > rtol and u_norm > utol:

            # Linearize Model with partial derivatives
            system._sys_linearize(params, unknowns, resids, total_derivs=False)

            # Calculate direction to take step
            arg.vec[:] = -resids.vec
            with system._dircontext:
                system.solve_linear(system.dumat, system.drmat, [None], mode='fwd', solver=self.ln_solver)
            system.clear_dparams()

            self.iter_count += 1

            # Allow different alphas for each value so we can keep moving when we
            # hit a bound.
            alpha = alpha_scalar*np.ones(len(unknowns.vec))

            # If our step will violate any upper or lower bounds, then reduce
            # alpha in just that direction so that we only step to that
            # boundary.
            alpha = unknowns.distance_along_vector_to_limit(alpha, result)

            # Cache the current norm
            if ls:
                base_u[:] = unknowns.vec
                base_norm = f_norm

            # Apply step that doesn't violate bounds
            unknowns_cache[:] = unknowns.vec
            unknowns.vec += alpha*result.vec

            # Metadata update
            update_local_meta(local_meta, (self.iter_count, 0))

            # Just evaluate (and optionally solve) the model with the new
            # points
            sub_solve_limit = self.options['solve_subsystems_limit']
            if self.options['solve_subsystems'] and (sub_solve_limit == -1 or self.iter_count <= sub_solve_limit):
                system.children_solve_nonlinear(local_meta)
            system.apply_nonlinear(params, unknowns, resids, local_meta)

            self.recorders.record_iteration(system, local_meta)

            f_norm = resids.norm()
            u_norm = np.linalg.norm(unknowns.vec - unknowns_cache)
            if iprint == 2:
                self.print_norm(self.print_name, system, self.iter_count,
                                f_norm, f_norm0, u_norm=u_norm)

            # Line Search to determine how far to step in the Newton direction
            if ls:
                f_norm = ls.solve(params, unknowns, resids, system, self,
                                  alpha_scalar, alpha, base_u, base_norm,
                                  f_norm, f_norm0, metadata)


        # Final residual print if you only want the last one
        if iprint == 1:
            self.print_norm(self.print_name, system, self.iter_count,
                            f_norm, f_norm0, u_norm=u_norm)

        # Return system's FD status back to what it was
        system.deriv_options['type'] = save_type
        system.deriv_options.locked = True

        if self.iter_count >= maxiter or isnan(f_norm):
            msg = 'FAILED to converge after %d iterations' % self.iter_count
            fail = True
        else:
            msg = 'Converged in %d iterations' % self.iter_count
            fail = False

        if iprint > 0 or (fail and iprint > -1):

            self.print_norm(self.print_name, system, self.iter_count,
                            f_norm, f_norm0, msg=msg)

        if fail and self.options['err_on_maxiter']:
            raise AnalysisError("Solve in '%s': Newton %s" % (system.pathname,
                                                              msg))
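Each pass through that loop is one damped Newton iteration: linearize, solve J du = -r for the step, clip the step length at variable bounds, apply u <- u + alpha*du, and re-evaluate, stopping on any of ||r|| <= atol, ||r||/||r0|| <= rtol, or ||du|| <= utol. A minimal dense-numpy sketch of the bare loop, without the bound clipping, line search, or recording (toy residual, hypothetical names):

    import numpy as np

    def newton_sketch(residual, jacobian, u, alpha=1.0, atol=1e-10,
                      rtol=1e-10, maxiter=20):
        """Damped Newton iteration on a small dense system."""
        r = residual(u)
        f_norm = f_norm0 = np.linalg.norm(r)
        it = 0
        while it < maxiter and f_norm > atol and f_norm / f_norm0 > rtol:
            du = np.linalg.solve(jacobian(u), -r)  # J du = -r
            u = u + alpha * du                     # damped Newton step
            r = residual(u)
            f_norm = np.linalg.norm(r)
            it += 1
        return u, f_norm

    # Solve x**2 = 2 and y = x from the starting point (1, 0).
    res = lambda u: np.array([u[0]**2 - 2.0, u[1] - u[0]])
    jac = lambda u: np.array([[2.0 * u[0], 0.0], [-1.0, 1.0]])
    u_final, _ = newton_sketch(res, jac, np.array([1.0, 0.0]))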
Example no. 40
    def _objfunc(self, dv_dict):
        """ Function that evaluates and returns the objective function and
        constraints. This function is passed to pyOpt's Optimization object
        and is called from its optimizers.

        Args
        ----
        dv_dict : dict
            Dictionary of design variable values.

        Returns
        -------
        func_dict : dict
            Dictionary of all functional variables evaluated at design point.

        fail : int
            0 for successful function evaluation
            1 for unsuccessful function evaluation
        """

        fail = 0
        metadata = self.metadata
        system = self.root

        try:
            for name in self.indep_list:
                self.set_desvar(name, dv_dict[name])

            # Execute the model
            #print("Setting DV")
            #print(dv_dict)

            self.iter_count += 1
            update_local_meta(metadata, (self.iter_count, ))

            try:
                with self.root._dircontext:
                    system.solve_nonlinear(metadata=metadata)

            # Let the optimizer try to handle the error
            except AnalysisError:
                fail = 1

            func_dict = self.get_objectives()  # this returns a new OrderedDict
            func_dict.update(self.get_constraints())

            # Record after getting obj and constraint to assure they have
            # been gathered in MPI.
            self.recorders.record_iteration(system, metadata)

            # Get the double-sided constraint evaluations
            #for key, con in iteritems(self.get_2sided_constraints()):
            #    func_dict[name] = np.array(con.evaluate(self.parent))

        except Exception as msg:
            tb = traceback.format_exc()

            # Exceptions seem to be swallowed by the C code, so this
            # should give the user more info than the dreaded "segfault"
            print("Exception: %s" % str(msg))
            print(70 * "=", tb, 70 * "=")
            fail = 1
            func_dict = {}

        #print("Functions calculated")
        #print(func_dict)
        return func_dict, fail
Example no. 41
    def run(self, problem):
        """pyOpt execution. Note that pyOpt controls the execution, and the
        individual optimizers (e.g., SNOPT) control the iteration.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """

        self.pyopt_solution = None
        rel = problem.root._probdata.relevance

        # Metadata Setup
        self.metadata = create_local_meta(None, self.options['optimizer'])
        self.iter_count = 0
        update_local_meta(self.metadata, (self.iter_count, ))

        # Initial Run
        with problem.root._dircontext:
            problem.root.solve_nonlinear(metadata=self.metadata)

        opt_prob = Optimization(self.options['title'], self._objfunc)

        # Add all parameters
        param_meta = self.get_desvar_metadata()
        self.indep_list = indep_list = list(param_meta)
        param_vals = self.get_desvars()

        for name, meta in iteritems(param_meta):
            opt_prob.addVarGroup(name,
                                 meta['size'],
                                 type='c',
                                 value=param_vals[name],
                                 lower=meta['lower'],
                                 upper=meta['upper'])

        opt_prob.finalizeDesignVariables()

        # Figure out parameter subsparsity for paramcomp index connections.
        # sub_param_conns is empty unless there are some index conns.
        # full_param_conns gets filled with the connections to the entire
        # parameter so that those params can be filtered out of the sparse
        # set if the full path is also relevant
        sub_param_conns = {}
        full_param_conns = {}
        for name in indep_list:
            pathname = problem.root.unknowns.metadata(name)['pathname']
            sub_param_conns[name] = {}
            full_param_conns[name] = set()
            for target, info in iteritems(problem.root.connections):
                src, indices = info
                if src == pathname:
                    if indices is not None:
                        # Need to map the connection indices onto the desvar
                        # indices if both are declared.
                        dv_idx = param_meta[name].get('indices')
                        indices = set(indices)
                        if dv_idx is not None:
                            indices.intersection_update(dv_idx)
                            ldv_idx = list(dv_idx)
                            mapped_idx = [
                                ldv_idx.index(item) for item in indices
                            ]
                            sub_param_conns[name][target] = mapped_idx
                        else:
                            sub_param_conns[name][target] = indices
                    else:
                        full_param_conns[name].add(target)

        # Add all objectives
        objs = self.get_objectives()
        self.quantities = list(objs)
        self.sparsity = OrderedDict()
        self.sub_sparsity = OrderedDict()
        for name in objs:
            opt_prob.addObj(name)
            self.sparsity[name] = self.indep_list

        # Calculate and save gradient for any linear constraints.
        lcons = self.get_constraints(lintype='linear').keys()
        self._problem = problem
        if len(lcons) > 0:
            self.lin_jacs = self.calc_gradient(indep_list,
                                               lcons,
                                               return_format='dict')
            #print("Linear Gradient")
            #print(self.lin_jacs)

        # Add all equality constraints
        econs = self.get_constraints(ctype='eq', lintype='nonlinear')
        con_meta = self.get_constraint_metadata()
        self.quantities += list(econs)

        self.active_tols = {}
        for name in self.get_constraints(ctype='eq'):
            meta = con_meta[name]
            size = meta['size']
            lower = upper = meta['equals']

            # Sparsify Jacobian via relevance
            rels = rel.relevant[name]
            wrt = rels.intersection(indep_list)
            self.sparsity[name] = wrt

            if meta['linear']:
                opt_prob.addConGroup(name,
                                     size,
                                     lower=lower,
                                     upper=upper,
                                     linear=True,
                                     wrt=wrt,
                                     jac=self.lin_jacs[name])
            else:

                jac = self._build_sparse(name, wrt, size, param_vals,
                                         sub_param_conns, full_param_conns,
                                         rels)
                opt_prob.addConGroup(name,
                                     size,
                                     lower=lower,
                                     upper=upper,
                                     wrt=wrt,
                                     jac=jac)

            active_tol = meta.get('active_tol')
            if active_tol:
                self.active_tols[name] = active_tol

        # Add all inequality constraints
        incons = self.get_constraints(ctype='ineq', lintype='nonlinear')
        self.quantities += list(incons)

        for name in self.get_constraints(ctype='ineq'):
            meta = con_meta[name]
            size = meta['size']

            # Bounds - double sided is supported
            lower = meta['lower']
            upper = meta['upper']

            # Sparsify Jacobian via relevance
            rels = rel.relevant[name]
            wrt = rels.intersection(indep_list)
            self.sparsity[name] = wrt

            if meta['linear']:
                opt_prob.addConGroup(name,
                                     size,
                                     upper=upper,
                                     lower=lower,
                                     linear=True,
                                     wrt=wrt,
                                     jac=self.lin_jacs[name])
            else:

                jac = self._build_sparse(name, wrt, size, param_vals,
                                         sub_param_conns, full_param_conns,
                                         rels)
                opt_prob.addConGroup(name,
                                     size,
                                     upper=upper,
                                     lower=lower,
                                     wrt=wrt,
                                     jac=jac)

            active_tol = meta.get('active_tol')
            if active_tol is not None:
                self.active_tols[name] = active_tol

        # Instantiate the requested optimizer
        optimizer = self.options['optimizer']
        try:
            _tmp = __import__('pyoptsparse', globals(), locals(), [optimizer],
                              0)
            opt = getattr(_tmp, optimizer)()
        except ImportError:
            msg = "Optimizer %s is not available in this installation." % \
                   optimizer
            raise ImportError(msg)

        # Set optimization options
        for option, value in self.opt_settings.items():
            opt.setOption(option, value)

        self.opt_prob = opt_prob

        # Execute the optimization problem
        if self.options['gradient method'] == 'pyopt_fd':

            # Use pyOpt's internal finite difference
            fd_step = problem.root.deriv_options['step_size']
            sol = opt(opt_prob,
                      sens='FD',
                      sensStep=fd_step,
                      storeHistory=self.hist_file,
                      hotStart=self.hotstart_file)

        elif self.options['gradient method'] == 'snopt_fd':
            if self.options['optimizer'] == 'SNOPT':

                # Use SNOPT's internal finite difference
                fd_step = problem.root.deriv_options['step_size']
                sol = opt(opt_prob,
                          sens=None,
                          sensStep=fd_step,
                          storeHistory=self.hist_file,
                          hotStart=self.hotstart_file)

            else:
                msg = "SNOPT's internal finite difference can only be used with SNOPT"
                raise Exception(msg)
        else:

            # Use OpenMDAO's differentiator for the gradient
            sol = opt(opt_prob,
                      sens=self._gradfunc,
                      storeHistory=self.hist_file,
                      hotStart=self.hotstart_file)

        self._problem = None

        # Print results
        if self.options['print_results']:
            print(sol)

        # Pull optimal parameters back into framework and re-run, so that
        # framework is left in the right final state
        dv_dict = sol.getDVs()
        for name in indep_list:
            val = dv_dict[name]
            self.set_desvar(name, val)

        with self.root._dircontext:
            self.root.solve_nonlinear(metadata=self.metadata)

        # Save the most recent solution.
        self.pyopt_solution = sol
        try:
            exit_status = sol.optInform['value']
            self.exit_flag = 1
            if exit_status > 2:  # bad
                self.exit_flag = 0
        except KeyError:  # nothing is here, so something bad happened!
            self.exit_flag = 0
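The relevance bookkeeping above (sub_param_conns, full_param_conns, and the wrt sets) exists so each constraint is declared to pyoptsparse with only the design variables it actually depends on, which keeps the constraint Jacobian sparse. At its core it is a set intersection; a hedged illustration with made-up names:

    from collections import OrderedDict

    indep_list = ['x1', 'x2', 'x3']                  # hypothetical desvars
    relevant = {'con1': {'x1', 'x3', 'some_state'}}  # hypothetical relevance

    sparsity = OrderedDict()
    for con_name in ['con1']:
        # Keep only the design variables this constraint can actually reach;
        # anything else is dropped from its 'wrt' list.
        sparsity[con_name] = relevant[con_name].intersection(indep_list)

    assert sparsity['con1'] == {'x1', 'x3'}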
Example no. 42
    def solve(self,
              params,
              unknowns,
              resids,
              system,
              solver,
              alpha,
              fnorm0,
              metadata=None):
        """ Take the gradient calculated by the parent solver and figure out
        how far to go.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        system : `System`
            Parent `System` object.

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).

        solver : `Solver`
            Parent solver instance.

        alpha : float
            Initial over-relaxation factor as used in parent solver.

        fnorm0 : float
            Initial norm of the residual for relative tolerance check.

        Returns
        -------
        float
            Norm of the final residual
        """

        atol = self.options['atol']
        rtol = self.options['rtol']
        maxiter = self.options['maxiter']
        result = system.dumat[None]
        local_meta = create_local_meta(metadata, system.pathname)

        # If our step will violate any upper or lower bounds, then reduce
        # alpha so that we only step to that boundary.
        alpha = unknowns.distance_along_vector_to_limit(alpha, result)

        # Apply step that doesn't violate bounds
        unknowns.vec += alpha * result.vec

        # Metadata update
        update_local_meta(local_meta, (solver.iter_count, 0))

        # Just evaluate the model with the new points
        if solver.options['solve_subsystems']:
            system.children_solve_nonlinear(local_meta)
        system.apply_nonlinear(params, unknowns, resids, local_meta)

        self.recorders.record_iteration(system, local_meta)

        # Initial execution really belongs to our parent driver's iteration,
        # so use its info.
        fnorm = resids.norm()
        if solver.options['iprint'] > 0:
            self.print_norm(solver.print_name, system.pathname,
                            solver.iter_count, fnorm, fnorm0)

        itercount = 0
        ls_alpha = alpha

        # Further backtracking if needed.
        while itercount < maxiter and \
              fnorm > atol and \
              fnorm/fnorm0 > rtol:

            ls_alpha *= 0.5
            unknowns.vec -= ls_alpha * result.vec
            itercount += 1

            # Metadata update
            update_local_meta(local_meta, (solver.iter_count, itercount))

            # Just evaluate the model with the new points
            if self.options['solve_subsystems']:
                system.children_solve_nonlinear(local_meta)
            system.apply_nonlinear(params, unknowns, resids, local_meta)

            solver.recorders.record_iteration(system, local_meta)

            fnorm = resids.norm()
            if self.options['iprint'] > 0:
                self.print_norm(self.print_name,
                                system.pathname,
                                itercount,
                                fnorm,
                                fnorm0,
                                indent=1,
                                solver='LS')

        if itercount >= maxiter and self.options['err_on_maxiter']:
            raise AnalysisError(
                "Solve in '%s': BackTracking failed to converge after %d "
                "iterations." % (system.pathname, maxiter))

        return fnorm
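The in-place halving above never re-stores the pre-step point: the parent solver has already applied u = u0 + alpha*d, so subtracting the freshly halved ls_alpha*d leaves u0 + (alpha/2)*d, the next pass leaves u0 + (alpha/4)*d, and in general after k halvings

    u_k = u_0 + (alpha / 2**k) * d,

so each iteration tests a step half as long as the last without keeping a copy of u_0.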
Example no. 43
    def solve(self, params, unknowns, resids, system, metadata=None):
        """ Solves the system using a Netwon's Method.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        system : `System`
            Parent `System` object.

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        atol = self.options['atol']
        rtol = self.options['rtol']
        utol = self.options['utol']
        maxiter = self.options['maxiter']
        alpha_scalar = self.options['alpha']
        iprint = self.options['iprint']
        ls = self.line_search
        unknowns_cache = self.unknowns_cache

        # Metadata setup
        self.iter_count = 0
        local_meta = create_local_meta(metadata, system.pathname)
        if self.ln_solver:
            self.ln_solver.local_meta = local_meta
        else:
            system.ln_solver.local_meta = local_meta
        update_local_meta(local_meta, (self.iter_count, 0))

        # Perform an initial run to propagate srcs to targets.
        system.children_solve_nonlinear(local_meta)
        system.apply_nonlinear(params, unknowns, resids)

        if ls:
            base_u = np.zeros(unknowns.vec.shape)

        f_norm = resids.norm()
        f_norm0 = f_norm

        if iprint == 2:
            self.print_norm(self.print_name, system, 0, f_norm, f_norm0)

        arg = system.drmat[None]
        result = system.dumat[None]
        u_norm = 1.0e99

        # Can't have the system trying to FD itself when it also contains Newton.
        save_type = system.deriv_options['type']
        system.deriv_options.locked = False
        system.deriv_options['type'] = 'user'

        while self.iter_count < maxiter and f_norm > atol and \
                f_norm/f_norm0 > rtol and u_norm > utol:

            # Linearize Model with partial derivatives
            system._sys_linearize(params, unknowns, resids, total_derivs=False)

            # Calculate direction to take step
            arg.vec[:] = -resids.vec
            with system._dircontext:
                system.solve_linear(system.dumat,
                                    system.drmat, [None],
                                    mode='fwd',
                                    solver=self.ln_solver)

            self.iter_count += 1

            # Allow different alphas for each value so we can keep moving when we
            # hit a bound.
            alpha = alpha_scalar * np.ones(len(unknowns.vec))

            # If our step will violate any upper or lower bounds, then reduce
            # alpha in just that direction so that we only step to that
            # boundary.
            alpha = unknowns.distance_along_vector_to_limit(alpha, result)

            # Cache the current norm
            if ls:
                base_u[:] = unknowns.vec
                base_norm = f_norm

            # Apply step that doesn't violate bounds
            unknowns_cache[:] = unknowns.vec
            unknowns.vec += alpha * result.vec

            # Metadata update
            update_local_meta(local_meta, (self.iter_count, 0))

            # Just evaluate (and optionally solve) the model with the new
            # points
            if self.options['solve_subsystems']:
                system.children_solve_nonlinear(local_meta)
            system.apply_nonlinear(params, unknowns, resids, local_meta)

            self.recorders.record_iteration(system, local_meta)

            f_norm = resids.norm()
            u_norm = np.linalg.norm(unknowns.vec - unknowns_cache)
            if iprint == 2:
                self.print_norm(self.print_name,
                                system,
                                self.iter_count,
                                f_norm,
                                f_norm0,
                                u_norm=u_norm)

            # Line Search to determine how far to step in the Newton direction
            if ls:
                f_norm = ls.solve(params, unknowns, resids, system, self,
                                  alpha_scalar, alpha, base_u, base_norm,
                                  f_norm, f_norm0, metadata)

        # Final residual print if you only want the last one
        if iprint == 1:
            self.print_norm(self.print_name,
                            system,
                            self.iter_count,
                            f_norm,
                            f_norm0,
                            u_norm=u_norm)

        # Return system's FD status back to what it was
        system.deriv_options['type'] = save_type
        system.deriv_options.locked = True

        if self.iter_count >= maxiter or isnan(f_norm):
            msg = 'FAILED to converge after %d iterations' % self.iter_count
            fail = True
        else:
            msg = 'Converged in %d iterations' % self.iter_count
            fail = False

        if iprint > 0 or (fail and iprint > -1):

            self.print_norm(self.print_name,
                            system,
                            self.iter_count,
                            f_norm,
                            f_norm0,
                            msg=msg)

        if fail and self.options['err_on_maxiter']:
            raise AnalysisError("Solve in '%s': Newton %s" %
                                (system.pathname, msg))
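Compared with the older Newton in Example no. 47 below, this version adds a step-length test: with du the accepted update, the loop continues only while

    ||r|| > atol  and  ||r|| / ||r0|| > rtol  and  ||du|| > utol,

so a stalled solve (steps shrinking toward zero while the residual refuses to drop) exits through the utol branch instead of spending the whole iteration budget.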
Example no. 44
    def run(self, problem):
        """Optimize the problem using your choice of Scipy optimizer.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """

        # Metadata Setup
        opt = self.options['optimizer']
        self.metadata = create_local_meta(None, opt)
        self.iter_count = 0
        update_local_meta(self.metadata, (self.iter_count, ))

        # Initial Run
        with problem.root._dircontext:
            problem.root.solve_nonlinear(metadata=self.metadata)

        pmeta = self.get_desvar_metadata()
        self.params = list(pmeta)
        self.objs = list(self.get_objectives())
        con_meta = self.get_constraint_metadata()
        self.cons = list(con_meta)
        self.con_cache = self.get_constraints()

        self.opt_settings['maxiter'] = self.options['maxiter']
        self.opt_settings['disp'] = self.options['disp']

        # Size Problem
        nparam = 0
        for param in itervalues(pmeta):
            nparam += param['size']
        x_init = np.empty(nparam)

        # Initial Parameters
        i = 0
        use_bounds = (opt in _bounds_optimizers)
        if use_bounds:
            bounds = []
        else:
            bounds = None

        for name, val in iteritems(self.get_desvars()):
            size = pmeta[name]['size']
            x_init[i:i + size] = val
            i += size

            # Bounds if our optimizer supports them
            if use_bounds:
                meta_low = pmeta[name]['lower']
                meta_high = pmeta[name]['upper']
                for j in range(0, size):

                    if isinstance(meta_low, np.ndarray):
                        p_low = meta_low[j]
                    else:
                        p_low = meta_low

                    if isinstance(meta_high, np.ndarray):
                        p_high = meta_high[j]
                    else:
                        p_high = meta_high

                    bounds.append((p_low, p_high))

        # Constraints
        constraints = []
        i = 0
        if opt in _constraint_optimizers:
            for name, meta in con_meta.items():
                size = meta['size']
                dblcon = meta['upper'] is not None and meta['lower'] is not None
                for j in range(0, size):
                    con_dict = OrderedDict()
                    if meta['equals'] is not None:
                        con_dict['type'] = 'eq'
                    else:
                        con_dict['type'] = 'ineq'
                    con_dict['fun'] = self._confunc
                    if opt in _constraint_grad_optimizers:
                        con_dict['jac'] = self._congradfunc
                    con_dict['args'] = [name, j]
                    constraints.append(con_dict)
                self.con_idx[name] = i
                i += size

                # Add extra constraint if double-sided
                if dblcon:
                    name = '2bl-' + name
                    for j in range(0, size):
                        con_dict = OrderedDict()
                        con_dict['type'] = 'ineq'
                        con_dict['fun'] = self._confunc
                        if opt in _constraint_grad_optimizers:
                            con_dict['jac'] = self._congradfunc
                        con_dict['args'] = [name, j]
                        constraints.append(con_dict)

        # Provide gradients for optimizers that support it
        if opt in _gradient_optimizers:
            jac = self._gradfunc
        else:
            jac = None

        # optimize
        self._problem = problem
        result = minimize(
            self._objfunc,
            x_init,
            #args=(),
            method=opt,
            jac=jac,
            #hess=None,
            #hessp=None,
            bounds=bounds,
            constraints=constraints,
            tol=self.options['tol'],
            #callback=None,
            options=self.opt_settings)

        self._problem = None
        self.result = result
        self.exit_flag = 1 if self.result.success else 0

        if self.options['disp']:
            print('Optimization Complete')
            print('-' * 35)
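scipy's minimize only understands one-sided inequality entries of the form g(x) >= 0, which is why the double-sided block above registers a second, '2bl-'-prefixed constraint for the other bound. A hedged toy sketch of the same mapping, with both bounds expressed as separate 'ineq' entries (the real driver routes them through _confunc instead of plain lambdas):

    import numpy as np
    from scipy.optimize import minimize

    lower, upper = -1.0, 1.0
    con = lambda x: x[0] + x[1]  # toy constraint with lower <= con(x) <= upper

    constraints = [
        {'type': 'ineq', 'fun': lambda x: con(x) - lower},  # con(x) >= lower
        {'type': 'ineq', 'fun': lambda x: upper - con(x)},  # con(x) <= upper
    ]

    result = minimize(lambda x: (x[0] - 2.0)**2 + x[1]**2,
                      np.zeros(2), method='SLSQP',
                      constraints=constraints, tol=1e-8,
                      options={'maxiter': 200, 'disp': False})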
Example no. 45
    def solve(self,
              params,
              unknowns,
              resids,
              system,
              solver,
              alpha_scalar,
              alpha,
              base_u,
              base_norm,
              fnorm,
              fnorm0,
              metadata=None):
        """ Take the gradient calculated by the parent solver and figure out
        how far to go.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        system : `System`
            Parent `System` object.

        solver : `Solver`
            Parent solver instance.

        alpha_scalar : float
            Initial over-relaxation factor as used in parent solver.

        alpha : ndarray
            Initial over-relaxation factor as used in parent solver, vector
            (so we don't re-allocate).

        base_u : ndarray
            Initial value of unknowns before the Newton step.

        base_norm : float
            Norm of the residual prior to taking the Newton step.

        fnorm : float
            Norm of the residual after taking the Newton step.

        fnorm0 : float
            Initial norm of the residual for iteration printing.

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).

        Returns
        -------
        float
            Norm of the final residual
        """

        maxiter = self.options['maxiter']
        rho = self.options['rho']
        c = self.options['c']
        iprint = self.options['iprint']
        result = system.dumat[None]
        local_meta = create_local_meta(metadata, system.pathname)

        itercount = 0
        ls_alpha = alpha_scalar

        # Further backtracking if needed.
        # The Armijo-Goldstein condition is basically a slope comparison:
        # actual versus predicted reduction. We don't have an actual gradient,
        # but we have the Newton vector that should take us to zero, and our
        # "runs" are the same, so we can just compare the "rise".
        while itercount < maxiter and (base_norm -
                                       fnorm) < c * ls_alpha * base_norm:

            ls_alpha *= rho

            # If our step will violate any upper or lower bounds, then reduce
            # alpha in just that direction so that we only step to that
            # boundary.
            unknowns.vec[:] = base_u
            alpha[:] = ls_alpha
            alpha = unknowns.distance_along_vector_to_limit(alpha, result)

            unknowns.vec += alpha * result.vec
            itercount += 1

            # Metadata update
            update_local_meta(local_meta, (solver.iter_count, itercount))

            # Just evaluate the model with the new points
            if self.options['solve_subsystems']:
                system.children_solve_nonlinear(local_meta)
            system.apply_nonlinear(params, unknowns, resids, local_meta)

            solver.recorders.record_iteration(system, local_meta)

            fnorm = resids.norm()
            if iprint == 2:
                self.print_norm(self.print_name,
                                system,
                                itercount,
                                fnorm,
                                fnorm0,
                                indent=1,
                                solver='LS')

        # Final residual print if you only want the last one
        if iprint == 1:
            self.print_norm(self.print_name,
                            system,
                            itercount,
                            fnorm,
                            fnorm0,
                            indent=1,
                            solver='LS')

        if itercount >= maxiter or isnan(fnorm):

            if self.options['err_on_maxiter']:
                msg = "Solve in '{}': BackTracking failed to converge after {} " \
                      "iterations."
                raise AnalysisError(msg.format(system.pathname, maxiter))

            msg = 'FAILED to converge after %d iterations' % itercount
            fail = True
        else:
            msg = 'Converged in %d iterations' % itercount
            fail = False

        if iprint > 0 or (fail and iprint > -1):

            self.print_norm(self.print_name,
                            system,
                            itercount,
                            fnorm,
                            fnorm0,
                            msg=msg,
                            indent=1,
                            solver='LS')

        return fnorm
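Written out, the loop condition above is a residual-norm version of the Armijo sufficient-decrease test: keep shrinking the step (ls_alpha <- rho * ls_alpha) while

    ||r(u0)|| - ||r(u0 + alpha*d)|| < c * alpha * ||r(u0)||,

i.e. accept the first alpha whose actual reduction is at least the fraction c*alpha of the base norm. The classical Armijo condition compares against c*alpha times a directional derivative; here, as the comment says, the Newton direction is assumed to take the residual all the way to zero, so the predicted reduction per unit step is the base norm itself.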
Example no. 46
    def run(self, problem):
        """pyOpt execution. Note that pyOpt controls the execution, and the
        individual optimizers (e.g., SNOPT) control the iteration.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """

        self.pyopt_solution = None
        rel = problem.root._probdata.relevance

        # Metadata Setup
        self.metadata = create_local_meta(None, self.options['optimizer'])
        self.iter_count = 0
        update_local_meta(self.metadata, (self.iter_count,))

        # Initial Run
        problem.root.solve_nonlinear(metadata=self.metadata)

        opt_prob = Optimization(self.options['title'], self._objfunc)

        # Add all parameters
        param_meta = self.get_desvar_metadata()
        self.indep_list = indep_list = list(iterkeys(param_meta))
        param_vals = self.get_desvars()

        for name, meta in iteritems(param_meta):
            opt_prob.addVarGroup(name, meta['size'], type='c',
                                 value=param_vals[name],
                                 lower=meta['lower'], upper=meta['upper'])

        opt_prob.finalizeDesignVariables()

        # Add all objectives
        objs = self.get_objectives()
        self.quantities = list(iterkeys(objs))
        self.sparsity = OrderedDict()
        for name in objs:
            opt_prob.addObj(name)
            self.sparsity[name] = self.indep_list

        # Calculate and save gradient for any linear constraints.
        lcons = self.get_constraints(lintype='linear').keys()
        if len(lcons) > 0:
            self.lin_jacs = problem.calc_gradient(indep_list, lcons,
                                                  return_format='dict')
            #print("Linear Gradient")
            #print(self.lin_jacs)

        # Add all equality constraints
        econs = self.get_constraints(ctype='eq', lintype='nonlinear')
        con_meta = self.get_constraint_metadata()
        self.quantities += list(iterkeys(econs))

        for name in self.get_constraints(ctype='eq'):
            size = con_meta[name]['size']
            lower = upper = con_meta[name]['equals']

            # Sparsify Jacobian via relevance
            wrt = rel.relevant[name].intersection(indep_list)
            self.sparsity[name] = wrt

            if con_meta[name]['linear'] is True:
                opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                     linear=True, wrt=wrt,
                                     jac=self.lin_jacs[name])
            else:
                opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                     wrt=wrt)

        # Add all inequality constraints
        incons = self.get_constraints(ctype='ineq', lintype='nonlinear')
        self.quantities += list(iterkeys(incons))

        for name in self.get_constraints(ctype='ineq'):
            size = con_meta[name]['size']

            # Bounds - double sided is supported
            lower = con_meta[name]['lower']
            upper = con_meta[name]['upper']

            # Sparsify Jacobian via relevance
            wrt = rel.relevant[name].intersection(indep_list)
            self.sparsity[name] = wrt

            if con_meta[name]['linear'] is True:
                opt_prob.addConGroup(name, size, upper=upper, lower=lower,
                                     linear=True, wrt=wrt,
                                     jac=self.lin_jacs[name])
            else:
                opt_prob.addConGroup(name, size, upper=upper, lower=lower,
                                     wrt=wrt)

        # Instantiate the requested optimizer
        optimizer = self.options['optimizer']
        try:
            exec('from pyoptsparse import %s' % optimizer)
        except ImportError:
            msg = "Optimizer %s is not available in this installation." % \
                   optimizer
            raise ImportError(msg)

        optname = vars()[optimizer]
        opt = optname()

        # Set optimization options
        for option, value in self.opt_settings.items():
            opt.setOption(option, value)

        self._problem = problem

        # Execute the optimization problem
        if self.options['pyopt_diff'] is True:
            # Use pyOpt's internal finite difference
            fd_step = problem.root.fd_options['step_size']
            sol = opt(opt_prob, sens='FD', sensStep=fd_step, storeHistory=self.hist_file)
        else:
            # Use OpenMDAO's differentiator for the gradient
            sol = opt(opt_prob, sens=self._gradfunc, storeHistory=self.hist_file)

        self._problem = None

        # Print results
        if self.options['print_results'] is True:
            print(sol)

        # Pull optimal parameters back into framework and re-run, so that
        # framework is left in the right final state
        dv_dict = sol.getDVs()
        for name in indep_list:
            val = dv_dict[name]
            self.set_desvar(name, val)

        self.root.solve_nonlinear(metadata=self.metadata)

        # Save the most recent solution.
        self.pyopt_solution = sol
        try:
            exit_status = sol.optInform['value']
            self.exit_flag = 1
            if exit_status > 2: # bad
                self.exit_flag = 0
        except KeyError:  # nothing is here, so something bad happened!
            self.exit_flag = 0
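The exec-based import above is the fragile part of this older driver: it relies on vars() picking the class back out of the local namespace. Example no. 41 above does the same job with __import__ plus getattr; an equivalent hedged sketch using importlib (the function name is made up):

    import importlib

    def load_optimizer(name):
        """Instantiate a pyoptsparse optimizer class selected by name."""
        try:
            module = importlib.import_module('pyoptsparse')
            return getattr(module, name)()
        except (ImportError, AttributeError):
            raise ImportError(
                "Optimizer %s is not available in this installation." % name)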
Example no. 47
    def solve(self, params, unknowns, resids, system, metadata=None):
        """ Solves the system using a Netwon's Method.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        system : `System`
            Parent `System` object.

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        atol = self.options['atol']
        rtol = self.options['rtol']
        maxiter = self.options['maxiter']
        ls_atol = self.options['ls_atol']
        ls_rtol = self.options['ls_rtol']
        ls_maxiter = self.options['ls_maxiter']
        alpha = self.options['alpha']

        # Metadata setup
        self.iter_count = 0
        ls_itercount = 0
        local_meta = create_local_meta(metadata, system.name)
        system.ln_solver.local_meta = local_meta
        update_local_meta(local_meta, (self.iter_count, ls_itercount))

        # Perform an initial run to propagate srcs to targets.
        system.children_solve_nonlinear(local_meta)
        system.apply_nonlinear(params, unknowns, resids)

        f_norm = resids.norm()
        f_norm0 = f_norm

        if self.options['iprint'] > 0:
            self.print_norm('NEWTON', local_meta, 0, f_norm, f_norm0)

        arg = system.drmat[None]
        result = system.dumat[None]

        alpha_base = alpha
        while self.iter_count < maxiter and f_norm > atol and \
                f_norm/f_norm0 > rtol:

            # Linearize Model
            system.jacobian(params, unknowns, resids)

            # Calculate direction to take step
            arg.vec[:] = resids.vec[:]
            system.solve_linear(system.dumat, system.drmat, [None], mode='fwd')

            unknowns.vec[:] += alpha*result.vec[:]

            # Metadata update
            self.iter_count += 1
            ls_itercount = 0
            update_local_meta(local_meta, (self.iter_count, ls_itercount))

            # Just evaluate the model with the new points
            system.children_solve_nonlinear(local_meta)
            system.apply_nonlinear(params, unknowns, resids, local_meta)

            for recorder in self.recorders:
                recorder.raw_record(params, unknowns, resids, local_meta)

            f_norm = resids.norm()
            if self.options['iprint'] > 0:
                self.print_norm('NEWTON', local_meta, self.iter_count, f_norm, f_norm0)

            # Backtracking Line Search
            while ls_itercount < ls_maxiter and \
                    f_norm > ls_atol and \
                    f_norm/f_norm0 > ls_rtol:

                alpha *= 0.5
                unknowns.vec[:] -= alpha*result.vec[:]
                ls_itercount += 1

                # Metadata update
                update_local_meta(local_meta, (self.iter_count, ls_itercount))

                # Just evaluate the model with the new points

                system.children_solve_nonlinear(local_meta)
                system.apply_nonlinear(params, unknowns, resids, local_meta)

                for recorder in self.recorders:
                    recorder.raw_record(params, unknowns, resids, local_meta)

                f_norm = resids.norm()
                if self.options['iprint'] > 1:
                    self.print_norm('BK_TKG', local_meta, ls_itercount, f_norm,
                                    f_norm/f_norm0, indent=1, solver='LS')

            # Reset backtracking
            alpha = alpha_base

            for recorder in self.recorders:
                recorder.raw_record(params, unknowns, resids, local_meta)

        # Need to make sure the whole workflow is executed at the final
        # point, not just evaluated.
        #self.iter_count += 1
        #update_local_meta(local_meta, (self.iter_count, 0))
        #system.children_solve_nonlinear(local_meta)

        if self.options['iprint'] > 0:
            self.print_norm('NEWTON', local_meta, self.iter_count, f_norm,
                            f_norm0, msg='Converged')
Example no. 48
    def solve(self, params, unknowns, resids, system, metadata=None):
        """ Solves the system using Gauss Seidel.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        system : `System`
            Parent `System` object.

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        atol = self.options['atol']
        rtol = self.options['rtol']
        utol = self.options['utol']
        maxiter = self.options['maxiter']
        rutol = self.options['rutol']
        iprint = self.options['iprint']
        unknowns_cache = self.unknowns_cache

        # Initial run
        self.iter_count = 1

        # Metadata setup
        local_meta = create_local_meta(metadata, system.pathname)
        system.ln_solver.local_meta = local_meta
        update_local_meta(local_meta, (self.iter_count,))

        # Initial Solve
        system.children_solve_nonlinear(local_meta)

        self.recorders.record_iteration(system, local_meta)

        # Bail early if the user wants to.
        if maxiter == 1:
            return

        resids = system.resids
        unknowns_cache = np.zeros(unknowns.vec.shape)

        # Evaluate Norm
        system.apply_nonlinear(params, unknowns, resids)
        normval = resids.norm()
        basenorm = normval if normval > atol else 1.0
        u_norm = 1.0e99
        ru_norm = 1.0e99

        if iprint == 2:
            self.print_norm(self.print_name, system, 1, normval, basenorm)

        while self.iter_count < maxiter and \
                normval > atol and \
                normval/basenorm > rtol  and \
                u_norm > utol and \
                ru_norm > rutol:

            # Metadata update
            self.iter_count += 1
            update_local_meta(local_meta, (self.iter_count,))
            unknowns_cache[:] = unknowns.vec

            # Runs an iteration
            system.children_solve_nonlinear(local_meta)
            self.recorders.record_iteration(system, local_meta)

            # Evaluate Norm
            system.apply_nonlinear(params, unknowns, resids)
            normval = resids.norm()
            u_norm = np.linalg.norm(unknowns.vec - unknowns_cache)
            ru_norm = u_norm / np.linalg.norm(unknowns.vec)


            if self.options['use_aitken']: # If Aitken acceleration is enabled

                # This method is used by Kenway et al. in "Scalable Parallel
                # Approach for High-Fidelity Steady-State Aeroelastic Analysis
                # and Adjoint Derivative Computations" (line 22 of Algorithm 1)
                # It is based on "A version of the Aitken accelerator for
                # computer iteration" by Irons et al.

                # Use relaxation after second iteration
                # self.delta_u_n_1 is a string for the first iteration
                if (type(self.delta_u_n_1) is not str) and \
                    normval > atol and \
                    normval/basenorm > rtol  and \
                    u_norm > utol and \
                    ru_norm > rutol:

                    delta_u_n = unknowns.vec - unknowns_cache
                    delta_u_n_1 = self.delta_u_n_1

                    # Compute relaxation factor
                    self.aitken_alpha = self.aitken_alpha * \
                        (1. - np.dot((delta_u_n  - delta_u_n_1), delta_u_n) \
                        / np.linalg.norm((delta_u_n  - delta_u_n_1), 2)**2)

                    # Limit relaxation factor to desired range
                    self.aitken_alpha = max(self.options['aitken_alpha_min'],
                        min(self.options['aitken_alpha_max'], self.aitken_alpha))

                    if iprint == 1 or iprint == 2:
                        print("Aitken relaxation factor is", self.aitken_alpha)

                    self.delta_u_n_1 = delta_u_n.copy()

                    # Update unknowns vector
                    unknowns.vec[:] = unknowns_cache + self.aitken_alpha * delta_u_n

                elif (type(self.delta_u_n_1) is str): # For the first iteration
                    # Initially self.delta_u_n_1 is a string then it is replaced
                    # by the following vector
                    self.delta_u_n_1 = unknowns.vec - unknowns_cache

            if iprint == 2:
                self.print_norm(self.print_name, system, self.iter_count, normval,
                                basenorm, u_norm=u_norm)

        # Final residual print if you only want the last one
        if iprint == 1:
            self.print_norm(self.print_name, system, self.iter_count, normval,
                            basenorm, u_norm=u_norm)

        if self.iter_count >= maxiter or isnan(normval):
            msg = 'FAILED to converge after %d iterations' % self.iter_count
            fail = True
        else:
            fail = False

        if iprint > 0 or (fail and iprint > -1):
            if not fail:
                msg = 'Converged in %d iterations' % self.iter_count

            self.print_norm(self.print_name, system, self.iter_count, normval,
                            basenorm, msg=msg)

        if fail and self.options['err_on_maxiter']:
            raise AnalysisError("Solve in '%s': NLGaussSeidel %s" %
                                (system.pathname, msg))
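The relaxation update buried in that loop reads more clearly as a formula. With delta_u_n the current Gauss-Seidel update and delta_u_n_1 the previous one, the Aitken factor evolves as

    theta_n = theta_{n-1} * (1 - dot(delta_u_n - delta_u_n_1, delta_u_n)
                                 / ||delta_u_n - delta_u_n_1||**2),

is clipped into [aitken_alpha_min, aitken_alpha_max], and the accepted state becomes unknowns = unknowns_cache + theta_n * delta_u_n.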
Example no. 49
    def objfunc(self, dv_dict):
        """ Function that evaluates and returns the objective function and
        constraints. This function is passed to pyOpt's Optimization object
        and is called from its optimizers.

        Args
        ----
        dv_dict : dict
            Dictionary of design variable values.

        Returns
        -------
        func_dict : dict
            Dictionary of all functional variables evaluated at design point.

        fail : int
            0 for successful function evaluation
            1 for unsuccessful function evaluation
        """

        fail = 1
        func_dict = {}
        metadata = self.metadata
        system = self.root
        comm = system.comm
        iproc = comm.rank
        nproc = comm.size

        try:
            for name in self.get_params():
                self.set_param(name, dv_dict[name])

            # Execute the model
            #print("Setting DV")
            #print(dv_dict)

            self.iter_count += 1
            update_local_meta(metadata, (self.iter_count,))

            system.solve_nonlinear(metadata=metadata)
            for recorder in self.recorders:
                recorder.raw_record(system.params, system.unknowns,
                                    system.resids, metadata)

            # Get the objective function evaluations
            for name, obj in iteritems(self.get_objectives()):
                # if nproc > 1:
                #     owner = system._owning_ranks[name]
                #     if iproc == owner:
                #         func_dict[name] = comm.bcast(obj, root=owner)
                #     else:
                #         func_dict[name] = comm.bcast(None, root=owner)
                # else:
                func_dict[name] = obj

            # Get the constraint evaluations
            for name, con in iteritems(self.get_constraints()):
                # if nproc > 1:
                #     owner = system._owning_ranks[name]
                #     if iproc == owner:
                #         func_dict[name] = comm.bcast(con, root=owner)
                #     else:
                #         func_dict[name] = comm.bcast(None, root=owner)
                # else:
                func_dict[name] = con

            # Get the double-sided constraint evaluations
            #for key, con in iteritems(self.get_2sided_constraints()):
            #    func_dict[name] = np.array(con.evaluate(self.parent))

            fail = 0

        except Exception as msg:

            # Exceptions seem to be swallowed by the C code, so this
            # should give the user more info than the dreaded "segfault"
            print("Exception: %s" % str(msg))
            print(70*"=")
            import traceback
            traceback.print_exc()
            print(70*"=")
            fail = 1

        #print("Functions calculated")
        #print(func_dict)
        return func_dict, fail
Example no. 50
    def solve(self, params, unknowns, resids, system, metadata=None):
        """ Solves the system using Gauss Seidel.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        system : `System`
            Parent `System` object.

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        atol = self.options['atol']
        rtol = self.options['rtol']
        maxiter = self.options['maxiter']
        iprint = self.options['iprint']

        # Initial run
        self.iter_count = 1

        # Metadata setup
        local_meta = create_local_meta(metadata, system.pathname)
        system.ln_solver.local_meta = local_meta
        update_local_meta(local_meta, (self.iter_count,))

        # Initial Solve
        system.children_solve_nonlinear(local_meta)

        self.recorders.record_iteration(system, local_meta)

        # Bail early if the user wants to.
        if maxiter == 1:
            return

        resids = system.resids

        # Evaluate Norm
        system.apply_nonlinear(params, unknowns, resids)
        normval = resids.norm()
        basenorm = normval if normval > atol else 1.0

        if self.options['iprint'] > 0:
            self.print_norm('NLN_GS', system.pathname, 0, normval, basenorm)

        while self.iter_count < maxiter and \
                normval > atol and \
                normval/basenorm > rtol:

            # Metadata update
            self.iter_count += 1
            update_local_meta(local_meta, (self.iter_count,))

            # Runs an iteration
            system.children_solve_nonlinear(local_meta)
            self.recorders.record_iteration(system, local_meta)

            # Evaluate Norm
            system.apply_nonlinear(params, unknowns, resids)
            normval = resids.norm()

            if self.options['iprint'] > 0:
                self.print_norm('NLN_GS', system.pathname, self.iter_count, normval,
                                basenorm)

        if self.options['iprint'] > 0:
            if self.iter_count == maxiter or isnan(normval):
                msg = 'FAILED to converge after max iterations'
            else:
                msg = 'converged'

            self.print_norm('NLN_GS', system.pathname, self.iter_count, normval,
                            basenorm, msg=msg)
Example no. 51
    def test_update_meta(self):
        coord = (1, 2, 3)

        update_local_meta(self.meta, coord)
        self.assertEqual(self.meta['name'], '')
        self.assertEqual(self.meta['coord'], ['', coord])
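That test pins down the contract the rest of these examples lean on: create_local_meta starts an iteration-coordinate trail named after its second argument, and update_local_meta overwrites the trailing coordinate entry each iteration. A hedged mock that satisfies exactly this test (the real OpenMDAO metadata carries more fields, e.g. timestamps and success flags):

    def create_local_meta_sketch(parent_meta, name):
        # Start a coordinate trail; extend the parent's trail if one exists.
        base = list(parent_meta['coord']) if parent_meta else []
        return {'name': name, 'coord': base + [name, None]}

    def update_local_meta_sketch(meta, coord):
        # Overwrite the trailing slot with this iteration's coordinate.
        meta['coord'][-1] = coord

    meta = create_local_meta_sketch(None, '')
    update_local_meta_sketch(meta, (1, 2, 3))
    assert meta['name'] == '' and meta['coord'] == ['', (1, 2, 3)]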
Example no. 52
    def solve(self, params, unknowns, resids, system, solver, alpha, fnorm,
              fnorm0, metadata=None):
        """ Take the gradient calculated by the parent solver and figure out
        how far to go.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        system : `System`
            Parent `System` object.

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).

        solver : `Solver`
            Parent solver instance.

        alpha : float
            Initial over-relaxation factor as used in parent solver.

        fnorm : float
            Initial norm of the residual for absolute tolerance check.

        fnorm0 : float
            Initial norm of the residual for relative tolerance check.
        """

        atol = self.options['atol']
        rtol = self.options['rtol']
        maxiter = self.options['maxiter']
        result = system.dumat[None]

        local_meta = create_local_meta(metadata, system.pathname)
        itercount = 0
        ls_alpha = alpha

        # Backtracking Line Search
        while itercount < maxiter and \
              fnorm > atol and \
              fnorm/fnorm0 > rtol:

            ls_alpha *= 0.5
            unknowns.vec -= ls_alpha*result.vec
            itercount += 1

            # Metadata update
            update_local_meta(local_meta, (solver.iter_count, itercount))

            # Just evaluate the model with the new points
            if self.options['solve_subsystems'] is True:
                system.children_solve_nonlinear(local_meta)
            system.apply_nonlinear(params, unknowns, resids, local_meta)

            solver.recorders.record_iteration(system, local_meta)

            fnorm = resids.norm()
            if self.options['iprint'] > 0:
                self.print_norm('BK_TKG', system.pathname, itercount, fnorm,
                                fnorm0, indent=1, solver='LS')
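
The loop above halves ls_alpha each pass and backs the unknowns off by the shrinking step, re-evaluating the residual until a tolerance or the iteration cap is hit. A standalone sketch of the same halve-and-retest pattern on a scalar residual (all names and tolerances here are illustrative only):

import math

# Backtrack along a fixed step direction until the residual norm passes
# a tolerance, mirroring the structure of the solve() method above.
def backtrack(f, x, step, maxiter=20, atol=1e-12, rtol=1e-2):
    """Keep halving the step while the residual norm violates both tolerances."""
    fnorm0 = fnorm = abs(f(x))
    ls_alpha, itercount = 1.0, 0
    while itercount < maxiter and fnorm > atol and fnorm / fnorm0 > rtol:
        ls_alpha *= 0.5
        x -= ls_alpha * step      # back off by half of the previous move
        itercount += 1
        fnorm = abs(f(x))
    return x, itercount, fnorm

# Drive x toward sqrt(2), the root of f(x) = x**2 - 2.
x, its, fnorm = backtrack(lambda x: x * x - 2.0, x=2.0, step=2.0 - math.sqrt(2.0))
print(x, its, fnorm)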
Example n. 53
    def solve(self, params, unknowns, resids, system, metadata=None):
        """ Solves the system using Gauss Seidel.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        system : `System`
            Parent `System` object.

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        atol = self.options['atol']
        rtol = self.options['rtol']
        maxiter = self.options['maxiter']
        iprint = self.options['iprint']

        # Initial run
        self.iter_count = 1

        # Metadata setup
        local_meta = create_local_meta(metadata, system.pathname)
        system.ln_solver.local_meta = local_meta
        update_local_meta(local_meta, (self.iter_count,))

        # Initial Solve
        system.children_solve_nonlinear(local_meta)

        self.recorders.record_iteration(system, local_meta)

        # Bail early if the user wants to.
        if maxiter == 1:
            return

        resids = system.resids

        # Evaluate Norm
        system.apply_nonlinear(params, unknowns, resids)
        normval = resids.norm()
        basenorm = normval if normval > atol else 1.0

        if iprint > 0:
            self.print_norm(self.print_name, system.pathname, 0, normval,
                            basenorm)

        while self.iter_count < maxiter and \
                normval > atol and \
                normval/basenorm > rtol:

            # Metadata update
            self.iter_count += 1
            update_local_meta(local_meta, (self.iter_count,))

            # Run one iteration
            system.children_solve_nonlinear(local_meta)
            self.recorders.record_iteration(system, local_meta)

            # Evaluate Norm
            system.apply_nonlinear(params, unknowns, resids)
            normval = resids.norm()

            if iprint > 0:
                self.print_norm(self.print_name, system.pathname,
                                self.iter_count, normval, basenorm)

        if iprint > 0:
            if self.iter_count == maxiter or isnan(normval):
                msg = 'FAILED to converge after max iterations'
            else:
                msg = 'converged'

            self.print_norm(self.print_name,
                            system.pathname,
                            self.iter_count,
                            normval,
                            basenorm,
                            msg=msg)
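
This solver applies Gauss-Seidel at the system level: each children_solve_nonlinear call is one sweep over the subsystems, after which the full residual norm is re-checked against the tolerances. The classical linear analogue below shows the same sweep-then-check structure in a self-contained form (a toy 2x2 system for illustration, not OpenMDAO code):

import math

# Gauss-Seidel on a small linear system: one in-place sweep per
# iteration, followed by the same atol/rtol residual test as above.
def gauss_seidel(a, b, x, maxiter=50, atol=1e-10, rtol=1e-8):
    n = len(b)
    def resid_norm():
        return math.sqrt(sum(
            (b[i] - sum(a[i][j] * x[j] for j in range(n))) ** 2
            for i in range(n)))
    basenorm = resid_norm() or 1.0
    for it in range(1, maxiter + 1):
        for i in range(n):        # one sweep, using the freshest values
            s = sum(a[i][j] * x[j] for j in range(n) if j != i)
            x[i] = (b[i] - s) / a[i][i]
        normval = resid_norm()
        if normval <= atol or normval / basenorm <= rtol:
            break
    return x, it, normval

x, iters, norm = gauss_seidel([[4.0, 1.0], [1.0, 3.0]], [1.0, 2.0], [0.0, 0.0])
print(x, iters, norm)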
Example n. 54
    def _objfunc(self, dv_dict):
        """ Function that evaluates and returns the objective function and
        constraints. This function is passed to pyOpt's Optimization object
        and is called from its optimizers.

        Args
        ----
        dv_dict : dict
            Dictionary of design variable values.

        Returns
        -------
        func_dict : dict
            Dictionary of all functional variables evaluated at design point.

        fail : int
            0 for successful function evaluation
            1 for unsuccessful function evaluation
        """

        fail = 1
        metadata = self.metadata
        system = self.root

        try:
            for name in self.indep_list:
                self.set_desvar(name, dv_dict[name])

            # Execute the model
            #print("Setting DV")
            #print(dv_dict)

            self.iter_count += 1
            update_local_meta(metadata, (self.iter_count,))

            with self.root._dircontext:
                system.solve_nonlinear(metadata=metadata)

            func_dict = self.get_objectives() # this returns a new OrderedDict
            func_dict.update(self.get_constraints())

            # Record after getting obj and constraint to assure they have
            # been gathered in MPI.
            self.recorders.record_iteration(system, metadata)

            # Get the double-sided constraint evaluations
            #for key, con in iteritems(self.get_2sided_constraints()):
            #    func_dict[key] = np.array(con.evaluate(self.parent))

            fail = 0

        except Exception as msg:
            tb = traceback.format_exc()

            # Exceptions seem to be swallowed by the C code, so this
            # should give the user more info than the dreaded "segfault"
            print("Exception: %s" % str(msg))
            print(70 * "=", tb, 70 * "=")
            fail = 1
            func_dict = {}

        #print("Functions calculated")
        #print(func_dict)
        return func_dict, fail
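
The key contract here is the (func_dict, fail) return pair: pyOpt's optimizers call this wrapper from compiled code, so exceptions must be caught and reported as fail=1 rather than allowed to propagate. A stripped-down sketch of that convention, with a hypothetical evaluate callable standing in for the model run:

import traceback

# Hedged sketch of the fail-flag convention: return (func_dict, 0) on
# success and ({}, 1) on failure. `evaluate` is a hypothetical stand-in.
def objfunc(dv_dict, evaluate):
    fail = 1
    try:
        func_dict = evaluate(dv_dict)   # hypothetical model evaluation
        fail = 0
    except Exception as msg:
        # Report the traceback ourselves; the caller only sees the flag.
        print("Exception: %s" % str(msg))
        print(traceback.format_exc())
        func_dict = {}
    return func_dict, fail

funcs, fail = objfunc({'x': 2.0}, lambda dv: {'obj': dv['x'] ** 2})
print(funcs, fail)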
Example n. 55
    def run(self, problem):
        """Optimize the problem using your choice of Scipy optimizer.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """

        # Metadata Setup
        opt = self.options['optimizer']
        self.metadata = create_local_meta(None, opt)
        self.iter_count = 0
        update_local_meta(self.metadata, (self.iter_count,))

        # Initial Run
        problem.root.solve_nonlinear(metadata=self.metadata)

        pmeta = self.get_desvar_metadata()
        self.params = list(iterkeys(pmeta))
        self.objs = list(iterkeys(self.get_objectives()))
        con_meta = self.get_constraint_metadata()
        self.cons = list(iterkeys(con_meta))
        self.con_cache = self.get_constraints()

        self.opt_settings['maxiter'] = self.options['maxiter']
        self.opt_settings['disp'] = self.options['disp']

        # Size Problem
        nparam = 0
        for param in itervalues(pmeta):
            nparam += param['size']
        x_init = np.zeros(nparam)

        # Initial Parameters
        i = 0
        use_bounds = (opt in _bounds_optimizers)
        if use_bounds:
            bounds = []
        else:
            bounds = None

        for name, val in iteritems(self.get_desvars()):
            size = pmeta[name]['size']
            x_init[i:i+size] = val
            i += size

            # Bounds if our optimizer supports them
            if use_bounds:
                meta_low = pmeta[name]['lower']
                meta_high = pmeta[name]['upper']
                for j in range(0, size):

                    if isinstance(meta_low, np.ndarray):
                        p_low = meta_low[j]
                    else:
                        p_low = meta_low

                    if isinstance(meta_high, np.ndarray):
                        p_high = meta_high[j]
                    else:
                        p_high = meta_high

                    bounds.append((p_low, p_high))

        # Constraints
        constraints = []
        i = 0
        if opt in _constraint_optimizers:
            for name, meta in iteritems(con_meta):
                size = meta['size']
                for j in range(0, size):
                    con_dict = {}
                    if meta['equals'] is not None:
                        con_dict['type'] = 'eq'
                    else:
                        con_dict['type'] = 'ineq'
                    con_dict['fun'] = self._confunc
                    if opt in _constraint_grad_optimizers:
                        con_dict['jac'] = self._congradfunc
                    con_dict['args'] = [name, j]
                    constraints.append(con_dict)
                self.con_idx[name] = i
                i += size

        # Provide gradients for optimizers that support it
        if opt in _gradient_optimizers:
            jac = self._gradfunc
        else:
            jac = None

        # optimize
        self._problem = problem
        result = minimize(self._objfunc, x_init,
                          #args=(),
                          method=opt,
                          jac=jac,
                          #hess=None,
                          #hessp=None,
                          bounds=bounds,
                          constraints=constraints,
                          tol=self.options['tol'],
                          #callback=None,
                          options=self.opt_settings)

        self._problem = None
        self.result = result
        self.exit_flag = 1 if self.result.success else 0

        print('Optimization Complete')
        print('-'*35)
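
The minimize() call above passes bounds as (low, high) tuples and constraints as dicts with 'type', 'fun', and optionally 'jac' keys, which is the standard scipy.optimize interface. A self-contained toy example of the same call pattern (SLSQP supports both bounds and constraint dicts):

import numpy as np
from scipy.optimize import minimize

# Toy quadratic objective with one inequality constraint and simple
# bounds, exercising the same minimize() signature as the driver above.
def objfunc(x):
    return (x[0] - 1.0) ** 2 + (x[1] - 2.5) ** 2

constraints = [{'type': 'ineq', 'fun': lambda x: x[0] - 2 * x[1] + 2}]
bounds = [(0.0, None), (0.0, None)]

result = minimize(objfunc, np.zeros(2), method='SLSQP',
                  bounds=bounds, constraints=constraints, tol=1e-8)
print(result.success, result.x)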