class KSComp(Component):
    """Aggregates a number of functions to a single value via the
    Kreisselmeier-Steinhauser function.
    """

    def __init__(self, n=2):
        """Declare the aggregation input/output and the 'rho' option.

        Args
        ----
        n : int, optional
            Number of function values to aggregate.
        """
        # Bug fix: super() must name this class (KSComp), not KS.
        super(KSComp, self).__init__()

        self.n = n

        # Inputs
        self.add_param('g', np.zeros((n, )),
                       desc="Array of function values to be aggregated")

        # Outputs
        self.add_output('KS', 0.0,
                        desc="Value of the aggregate KS function")

        self.options = OptionsDictionary()
        # Bug fix: the option name must be the string 'rho'; the original
        # passed an undefined name `rho`, raising NameError at construction.
        self.options.add_option('rho', 0.1,
                                desc="Hyperparameter for the KS function")

        self._ks = KSfunction()

    def solve_nonlinear(self, params, unknowns, resids):
        """ Calculate output. """
        unknowns['KS'] = self._ks.compute(params['g'], self.options['rho'])

    def jacobian(self, params, unknowns, resids):
        """ Calculate and save derivatives. (i.e., Jacobian) """
        # Uses g_max, exponents, and summation from the last executed point.
        J = {}
        J['KS', 'g'] = np.hstack(self._ks.derivatives())
        # Bug fix: the computed Jacobian was built but never returned.
        return J
def __init__(self):
    """Initialize base Driver state: capabilities, options, VOI maps."""
    super(Driver, self).__init__()

    self.recorders = []

    # Capabilities this driver advertises.
    self.supports = OptionsDictionary(read_only=True)
    for capability, default in (('inequality_constraints', True),
                                ('equality_constraints', True),
                                ('linear_constraints', False),
                                ('multiple_objectives', False),
                                ('two_sided_constraints', False),
                                ('integer_parameters', False)):
        self.supports.add_option(capability, default)

    # This driver's own options.
    self.options = OptionsDictionary()

    self._params = OrderedDict()
    self._objs = OrderedDict()
    self._cons = OrderedDict()
    self._voi_sets = []

    # root is assigned during problem setup.
    self.root = None

    self.iter_count = 0
def __init__(self):
    """Initialize common solver state: iteration counter, options, recorders."""
    self.iter_count = 0

    self.options = OptionsDictionary()
    iprint_desc = ('Set to 0 to disable printing, set to 1 to print the '
                   'residual to stdout each iteration, set to 2 to print '
                   'subiteration residuals as well.')
    self.options.add_option('iprint', 0, values=[0, 1, 2], desc=iprint_desc)

    self.recorders = []
    self.local_meta = None
def __init__(self):
    """Set up recording options and internal bookkeeping."""
    opts = OptionsDictionary()
    opts.add_option('includes', ['*'],
                    desc='Patterns for variables to include in recording')
    opts.add_option('excludes', [],
                    desc='Patterns for variables to exclude from recording '
                         '(processed after includes)')
    self.options = opts

    # Output target; subclasses assign a stream or file here.
    self.out = None
    # pathname -> (param names, unknown names, resid names)
    self._filtered = {}
def __init__(self):
    """Declare capabilities and options for this simple driver."""
    super(MySimpleDriver, self).__init__()

    # What we support
    for capability, flag in (('inequality_constraints', True),
                             ('equality_constraints', False),
                             ('linear_constraints', False),
                             ('multiple_objectives', False)):
        self.supports[capability] = flag

    # My driver options
    self.options = OptionsDictionary()
    self.options.add_option('tol', 1e-4)
    self.options.add_option('maxiter', 10)

    self.alpha = 0.01
    self.violated = []
def __init__(self):
    """Initialize base Driver state: capabilities, options, VOI maps."""
    super(Driver, self).__init__()

    self.recorders = []

    # Capabilities this driver advertises (keys are runtime strings,
    # reproduced exactly as declared).
    self.supports = OptionsDictionary(read_only=True)
    for name, default in (("Inequality Constraints", True),
                          ("Equality Constraints", True),
                          ("Linear Constraints", False),
                          ("Multiple Objectives", False),
                          ("2-Sided Constraints", False),
                          ("Integer Parameters", False)):
        self.supports.add_option(name, default)

    # This driver's own options.
    self.options = OptionsDictionary()

    self._params = OrderedDict()
    self._objs = OrderedDict()
    self._cons = OrderedDict()
    self._voi_sets = []

    # root is assigned during problem setup.
    self.root = None

    self.iter_count = 0
def __init__(self, n=2, h=.01):
    """Set up the integration step size and declare RK4 options.

    Args
    ----
    n : int, optional
        Unused here; kept for subclass signatures.
    h : float, optional
        Time-step size for the integration.
    """
    super(RK4, self).__init__()

    self.h = h

    # Inputs: all inputs are defined in subclasses.

    # Options
    self.options = OptionsDictionary()
    option_specs = (
        ('state_var', '',
         "Name of the variable to be used for time integration"),
        ('init_state_var', '',
         "Name of the variable to be used for initial conditions"),
        ('external_vars', [],
         "List of names of variables that are external to the system "
         "but DO vary with time."),
        ('fixed_external_vars', [],
         "List of names of variables that are external to the system "
         "but DO NOT vary with time."),
    )
    for name, default, description in option_specs:
        self.options.add_option(name, default, desc=description)
def __init__(self):
    """Declare the options and run-result attributes for the external code."""
    super(ExternalCode, self).__init__()

    self.STDOUT = STDOUT
    self.DEV_NULL = DEV_NULL

    # Input options for this Component
    self.options = OptionsDictionary()
    add = self.options.add_option
    add('command', [], desc='command to be executed')
    add('env_vars', {},
        desc='Environment variables required by the command')
    add('poll_delay', 0.0,
        desc='Delay between polling for command completion. A value of zero will use an internally computed default')
    add('timeout', 0.0,
        desc='Maximum time to wait for command completion. A value of zero implies an infinite wait')
    add('check_external_outputs', True,
        desc='Check that all input or output external files exist')
    # NOTE(review): "pressence" typo preserved — it is runtime option text.
    add('external_input_files', [],
        desc='(optional) list of input file names to check the pressence of before solve_nonlinear')
    add('external_output_files', [],
        desc='(optional) list of input file names to check the pressence of after solve_nonlinear')

    # Results of the run, kept outside the OptionsDictionary.
    self.return_code = 0      # Return code from the command
    self.timed_out = False    # True if the command timed-out
    self.stdin = self.DEV_NULL
    self.stdout = None
    self.stderr = "error.out"
def __init__(self):
    """Initialize base Driver state: capabilities, options, VOI maps."""
    super(Driver, self).__init__()

    self.recorders = RecordingManager()

    # Capabilities this driver advertises; every one defaults to True here.
    self.supports = OptionsDictionary(read_only=True)
    for capability in ("inequality_constraints", "equality_constraints",
                       "linear_constraints", "multiple_objectives",
                       "two_sided_constraints", "integer_design_vars"):
        self.supports.add_option(capability, True)

    # This driver's own options.
    self.options = OptionsDictionary()

    self._desvars = OrderedDict()
    self._objs = OrderedDict()
    self._cons = OrderedDict()
    self._voi_sets = []
    self._vars_to_record = None

    # root is assigned during problem setup.
    self.root = None

    self.iter_count = 0
def __init__(self):
    """Initialize recording options and the filtered-name cache."""
    self.options = OptionsDictionary()
    for name, default, description in (
            ('includes', ['*'],
             'Patterns for variables to include in recording'),
            ('excludes', [],
             'Patterns for variables to exclude from recording '
             '(processed after includes)')):
        self.options.add_option(name, default, desc=description)

    # Output target; subclasses assign a stream or file here.
    self.out = None
    # pathname -> (param names, unknown names, resid names)
    self._filtered = {}
def __init__(self, n=2):
    """Declare the aggregation input/output and the 'rho' option.

    Args
    ----
    n : int, optional
        Number of function values to aggregate.
    """
    super(KS, self).__init__()

    self.n = n

    # Inputs
    self.add_param('g', np.zeros((n, )),
                   desc="Array of function values to be aggregated")

    # Outputs
    self.add_output('KS', 0.0,
                    desc="Value of the aggregate KS function")

    self.options = OptionsDictionary()
    # Bug fix: the option name must be the string 'rho'; the original
    # passed an undefined name `rho`, raising NameError at construction.
    self.options.add_option('rho', 0.1,
                            desc="Hyperparameter for the KS function")

    self._ks = KSfunction()
def __init__(self):
    """Declare options and run-result attributes for the external code."""
    super(ExternalCode, self).__init__()

    self.STDOUT = STDOUT
    self.DEV_NULL = DEV_NULL

    # Input options for this Component
    opts = self.options = OptionsDictionary()
    opts.add_option('command', [], desc='command to be executed')
    opts.add_option('env_vars', {},
                    desc='Environment variables required by the command')
    opts.add_option('poll_delay', 0.0,
                    desc='''Delay between polling for command completion. A value of zero will use an internally computed default''')
    opts.add_option('timeout', 0.0,
                    desc='''Maximum time to wait for command completion. A value of zero implies an infinite wait''')
    opts.add_option('check_external_outputs', True,
                    desc='Check that all input or output external files exist')
    # NOTE(review): "pressence" typo preserved — it is runtime option text.
    opts.add_option('external_input_files', [],
                    desc='(optional) list of input file names to check the pressence of before solve_nonlinear')
    opts.add_option('external_output_files', [],
                    desc='(optional) list of input file names to check the pressence of after solve_nonlinear')

    # Results of the run, kept outside the OptionsDictionary.
    self.return_code = 0      # Return code from the command
    self.timed_out = False    # True if the command timed-out
    self.stdin = self.DEV_NULL
    self.stdout = None
    self.stderr = "error.out"
def __init__(self):
    """Declare capabilities and options for this simple driver."""
    super(MySimpleDriver, self).__init__()

    # What we support (keys are runtime strings, reproduced exactly).
    for capability, flag in (("Inequality Constraints", True),
                             ("Equality Constraints", False),
                             ("Linear Constraints", False),
                             ("Multiple Objectives", False)):
        self.supports[capability] = flag

    # My driver options
    self.options = OptionsDictionary()
    self.options.add_option("tol", 1e-4)
    self.options.add_option("maxiter", 10)

    self.alpha = 0.01
    self.violated = []
def __init__(self):
    """Initialize recording options, output target, and the parallel flag."""
    opts = OptionsDictionary()
    opts.add_option('includes', ['*'],
                    desc='Patterns for variables to include in recording')
    opts.add_option('excludes', [],
                    desc='Patterns for variables to exclude from recording '
                         '(processed after includes)')
    self.options = opts

    self.out = None

    # Tells drivers whether this recorder supports real parallel recording
    # (recording on each process). When False — the default — the driver
    # figures out what variables must be gathered to rank 0 when running
    # under MPI. Subclasses that record on every process should set this
    # to True to avoid that unnecessary gathering.
    self._parallel = False

    # pathname -> (param names, unknown names, resid names)
    self._filtered = {}
class ExternalCode(Component):
    """Run an external code as a component.

    Default stdin is the 'null' device, default stdout is the console, and
    default stderr is ``error.out``.
    """

    def __init__(self):
        super(ExternalCode, self).__init__()

        self.STDOUT = STDOUT
        self.DEV_NULL = DEV_NULL

        # Input options for this Component
        self.options = OptionsDictionary()
        self.options.add_option('command', [], desc='command to be executed')
        self.options.add_option(
            'env_vars', {},
            desc='Environment variables required by the command')
        self.options.add_option(
            'poll_delay', 0.0,
            desc='''Delay between polling for command completion. A value of zero will use an internally computed default''')
        self.options.add_option(
            'timeout', 0.0,
            desc='''Maximum time to wait for command completion. A value of zero implies an infinite wait''')
        self.options.add_option(
            'check_external_outputs', True,
            desc='Check that all input or output external files exist')
        self.options.add_option(
            'external_input_files', [],
            desc='(optional) list of input file names to check the pressence of before solve_nonlinear')
        self.options.add_option(
            'external_output_files', [],
            desc='(optional) list of input file names to check the pressence of after solve_nonlinear')

        # Outputs of the run of the component, or items that will not work
        # with the OptionsDictionary.
        self.return_code = 0      # Return code from the command
        self.timed_out = False    # True if the command timed-out
        self.stdin = self.DEV_NULL
        self.stdout = None
        self.stderr = "error.out"

    def check_setup(self, out_stream=sys.stdout):
        """Write a report to the given stream indicating any potential
        problems found with the current configuration of this ``Problem``.

        Args
        ----
        out_stream : a file-like object, optional
        """
        # check for the command
        if not self.options['command']:
            out_stream.write("The command cannot be empty")
        else:
            if isinstance(self.options['command'], str):
                program_to_execute = self.options['command']
            else:
                program_to_execute = self.options['command'][0]
            command_full_path = find_executable(program_to_execute)

            if not command_full_path:
                msg = "The command to be executed, '%s', cannot be found" % program_to_execute
                out_stream.write(msg)

        # Check for missing input files
        missing_files = self._check_for_files(input=True)
        for iotype, path in missing_files:
            msg = "The %s file %s is missing" % (iotype, path)
            out_stream.write(msg)

    def solve_nonlinear(self, params, unknowns, resids):
        """Runs the component."""
        self.return_code = -12345678
        self.timed_out = False

        if not self.options['command']:
            raise ValueError('Empty command list')

        return_code = None
        error_msg = ''
        try:
            return_code, error_msg = self._execute_local()

            if return_code is None:
                # wait() returns None only on timeout.
                self.timed_out = True
                raise RuntimeError('Timed out')

            elif return_code:
                if isinstance(self.stderr, str):
                    if os.path.exists(self.stderr):
                        stderrfile = open(self.stderr, 'r')
                        error_desc = stderrfile.read()
                        stderrfile.close()
                        err_fragment = "\nError Output:\n%s" % error_desc
                    else:
                        err_fragment = "\n[stderr %r missing]" % self.stderr
                else:
                    err_fragment = error_msg

                raise RuntimeError('return_code = %d%s'
                                   % (return_code, err_fragment))

            if self.options['check_external_outputs']:
                missing_files = self._check_for_files(input=False)
                msg = ""
                for iotype, path in missing_files:
                    msg += "%s file %s is missing\n" % (iotype, path)
                if msg:
                    raise RuntimeError("Missing files: %s" % msg)
        finally:
            # -999999 marks "command never completed".
            self.return_code = -999999 if return_code is None else return_code

    def _check_for_files(self, input=True):
        """Check that all 'specific' external files exist.

        Args
        ----
        input : bool
            If True, check inputs. Else check outputs.

        Returns
        -------
        list of (str, str)
            (iotype, path) pairs for each missing file.
        """
        if input:
            files = self.options['external_input_files']
            iotype = 'input'
        else:
            files = self.options['external_output_files']
            # Bug fix: missing output files were previously mislabeled
            # as 'input' in the returned pairs (and in error messages).
            iotype = 'output'

        missing_files = []
        for path in files:
            if not os.path.exists(path):
                missing_files.append((iotype, path))

        return missing_files

    def _execute_local(self):
        """Run the command via ShellProc and wait for completion.

        Returns
        -------
        (int or None, str)
            The command's return code (None on timeout) and an error message.
        """
        # check to make sure command exists
        if isinstance(self.options['command'], str):
            program_to_execute = self.options['command']
        else:
            program_to_execute = self.options['command'][0]
        command_full_path = find_executable(program_to_execute)

        if not command_full_path:
            raise ValueError(
                "The command to be executed, '%s', cannot be found" % program_to_execute)

        command_for_shell_proc = self.options['command']
        if sys.platform == 'win32':
            # Windows needs the command run through the shell interpreter.
            command_for_shell_proc = ['cmd.exe', '/c'] + command_for_shell_proc

        self._process = \
            ShellProc(command_for_shell_proc, self.stdin,
                      self.stdout, self.stderr, self.options['env_vars'])

        try:
            return_code, error_msg = \
                self._process.wait(self.options['poll_delay'],
                                   self.options['timeout'])
        finally:
            self._process.close_files()
            self._process = None

        return (return_code, error_msg)
class ExternalCode(Component):
    """Run an external code as a component.

    Default stdin is the 'null' device, default stdout is the console, and
    default stderr is ``error.out``.

    Options
    -------
    fd_options['force_fd'] : bool(False)
        Set to True to finite difference this system.
    fd_options['form'] : str('forward')
        Finite difference mode. (forward, backward, central) You can also
        set to 'complex_step' to perform the complex step method if your
        components support it.
    fd_options['step_size'] : float(1e-06)
        Default finite difference stepsize
    fd_options['step_type'] : str('absolute')
        Set to absolute, relative
    options['check_external_outputs'] : bool(True)
        Check that all input or output external files exist
    options['command'] : list([])
        command to be executed
    options['env_vars'] : dict({})
        Environment variables required by the command
    options['external_input_files'] : list([])
        (optional) list of input file names to check the pressence of
        before solve_nonlinear
    options['external_output_files'] : list([])
        (optional) list of input file names to check the pressence of
        after solve_nonlinear
    options['poll_delay'] : float(0.0)
        Delay between polling for command completion. A value of zero will
        use an internally computed default
    options['timeout'] : float(0.0)
        Maximum time to wait for command completion. A value of zero
        implies an infinite wait
    """

    def __init__(self):
        super(ExternalCode, self).__init__()

        self.STDOUT = STDOUT
        self.DEV_NULL = DEV_NULL

        # Input options for this Component
        self.options = OptionsDictionary()
        self.options.add_option('command', [], desc='command to be executed')
        self.options.add_option(
            'env_vars', {},
            desc='Environment variables required by the command')
        self.options.add_option(
            'poll_delay', 0.0,
            desc='Delay between polling for command completion. A value of zero will use an internally computed default')
        # Bug fix: this desc was an unterminated single-quoted string broken
        # across two physical lines (a SyntaxError); rejoined into one literal.
        self.options.add_option(
            'timeout', 0.0,
            desc='Maximum time to wait for command completion. A value of zero implies an infinite wait')
        self.options.add_option(
            'check_external_outputs', True,
            desc='Check that all input or output external files exist')
        self.options.add_option(
            'external_input_files', [],
            desc='(optional) list of input file names to check the pressence of before solve_nonlinear')
        self.options.add_option(
            'external_output_files', [],
            desc='(optional) list of input file names to check the pressence of after solve_nonlinear')

        # Outputs of the run of the component, or items that will not work
        # with the OptionsDictionary.
        self.return_code = 0      # Return code from the command
        self.timed_out = False    # True if the command timed-out
        self.stdin = self.DEV_NULL
        self.stdout = None
        self.stderr = "error.out"

    def check_setup(self, out_stream=sys.stdout):
        """Write a report to the given stream indicating any potential
        problems found with the current configuration of this ``Problem``.

        Args
        ----
        out_stream : a file-like object, optional
        """
        # check for the command
        if not self.options['command']:
            out_stream.write("The command cannot be empty")
        else:
            if isinstance(self.options['command'], str):
                program_to_execute = self.options['command']
            else:
                program_to_execute = self.options['command'][0]
            command_full_path = find_executable(program_to_execute)

            if not command_full_path:
                msg = "The command to be executed, '%s', cannot be found" % program_to_execute
                out_stream.write(msg)

        # Check for missing input files
        missing_files = self._check_for_files(input=True)
        for iotype, path in missing_files:
            msg = "The %s file %s is missing" % (iotype, path)
            out_stream.write(msg)

    def solve_nonlinear(self, params, unknowns, resids):
        """Runs the component."""
        self.return_code = -12345678
        self.timed_out = False

        if not self.options['command']:
            raise ValueError('Empty command list')

        return_code = None
        error_msg = ''
        try:
            return_code, error_msg = self._execute_local()

            if return_code is None:
                # wait() returns None only on timeout.
                self.timed_out = True
                raise RuntimeError('Timed out')

            elif return_code:
                if isinstance(self.stderr, str):
                    if os.path.exists(self.stderr):
                        stderrfile = open(self.stderr, 'r')
                        error_desc = stderrfile.read()
                        stderrfile.close()
                        err_fragment = "\nError Output:\n%s" % error_desc
                    else:
                        err_fragment = "\n[stderr %r missing]" % self.stderr
                else:
                    err_fragment = error_msg

                raise RuntimeError('return_code = %d%s'
                                   % (return_code, err_fragment))

            if self.options['check_external_outputs']:
                missing_files = self._check_for_files(input=False)
                msg = ""
                for iotype, path in missing_files:
                    msg += "%s file %s is missing\n" % (iotype, path)
                if msg:
                    raise RuntimeError("Missing files: %s" % msg)
        finally:
            # -999999 marks "command never completed".
            self.return_code = -999999 if return_code is None else return_code

    def _check_for_files(self, input=True):
        """Check that all 'specific' external files exist.

        Args
        ----
        input : bool
            If True, check inputs. Else check outputs.

        Returns
        -------
        list of (str, str)
            (iotype, path) pairs for each missing file.
        """
        if input:
            files = self.options['external_input_files']
            iotype = 'input'
        else:
            files = self.options['external_output_files']
            # Bug fix: missing output files were previously mislabeled
            # as 'input' in the returned pairs (and in error messages).
            iotype = 'output'

        missing_files = []
        for path in files:
            if not os.path.exists(path):
                missing_files.append((iotype, path))

        return missing_files

    def _execute_local(self):
        """Run the command via ShellProc and wait for completion.

        Returns
        -------
        (int or None, str)
            The command's return code (None on timeout) and an error message.
        """
        # check to make sure command exists
        if isinstance(self.options['command'], str):
            program_to_execute = self.options['command']
        else:
            program_to_execute = self.options['command'][0]
        command_full_path = find_executable(program_to_execute)

        if not command_full_path:
            raise ValueError(
                "The command to be executed, '%s', cannot be found" % program_to_execute)

        command_for_shell_proc = self.options['command']
        if sys.platform == 'win32':
            # Windows needs the command run through the shell interpreter.
            command_for_shell_proc = ['cmd.exe', '/c'] + command_for_shell_proc

        self._process = \
            ShellProc(command_for_shell_proc, self.stdin,
                      self.stdout, self.stderr, self.options['env_vars'])

        try:
            return_code, error_msg = \
                self._process.wait(self.options['poll_delay'],
                                   self.options['timeout'])
        finally:
            self._process.close_files()
            self._process = None

        return (return_code, error_msg)
def test_options_dictionary(self):
    """Exercise OptionsDictionary: unknown keys, type checks, enums, bounds."""
    self.options = OptionsDictionary()

    # Keys that were never added must raise.
    with self.assertRaises(KeyError) as cm:
        self.options['junk']
    self.assertEqual('"Option \'{}\' has not been added"'.format('junk'),
                     str(cm.exception))

    # Type checking - a float option rejects an int.
    self.options.add_option('atol', 1e-6)
    self.assertEqual(self.options['atol'], 1.0e-6)
    with self.assertRaises(ValueError) as cm:
        self.options['atol'] = 1
    expected = ("'atol' should be a '<type 'float'>'" if PY2 else
                "'atol' should be a '<class 'float'>'")
    self.assertEqual(expected, str(cm.exception))

    # Enumerated values: in-range assignments succeed, out-of-range fails.
    self.options.add_option('iprint', 0, values=[0, 1, 2, 3])
    for value in (0, 1, 2, 3):
        self.options['iprint'] = value
    with self.assertRaises(ValueError) as cm:
        self.options['iprint'] = 4
    self.assertEqual("'iprint' must be one of the following values: '[0, 1, 2, 3]'",
                     str(cm.exception))

    # Boolean options reject non-bool assignments.
    self.options.add_option('conmin_diff', True)
    self.options['conmin_diff'] = True
    self.options['conmin_diff'] = False
    with self.assertRaises(ValueError) as cm:
        self.options['conmin_diff'] = "YES!"
    expected = ("'conmin_diff' should be a '<type 'bool'>'" if PY2 else
                "'conmin_diff' should be a '<class 'bool'>'")
    self.assertEqual(expected, str(cm.exception))

    # Max and min bounds.
    self.options.add_option('maxiter', 10, low=0, high=10)
    for value in range(11):
        self.options['maxiter'] = value
    with self.assertRaises(ValueError) as cm:
        self.options['maxiter'] = 15
    self.assertEqual("maximum allowed value for 'maxiter' is '10'",
                     str(cm.exception))
    with self.assertRaises(ValueError) as cm:
        self.options['maxiter'] = -1
    self.assertEqual("minimum allowed value for 'maxiter' is '0'",
                     str(cm.exception))
class BaseRecorder(object):
    """Base class for all case recorders."""

    def __init__(self):
        self.options = OptionsDictionary()
        self.options.add_option('includes', ['*'],
                                desc='Patterns for variables to include in recording')
        self.options.add_option('excludes', [],
                                desc='Patterns for variables to exclude from recording '
                                     '(processed after includes)')
        self.out = None
        # pathname -> (param names, unknown names, resid names)
        self._filtered = {}
        # TODO: System specific includes/excludes

    def startup(self, group):
        """Prepare for a new run: compute the per-group inclusion lists."""
        params = [p for p in group.params if self._check_path(p)]
        unknowns = [u for u in group.unknowns if self._check_path(u)]
        resids = [r for r in group.resids if self._check_path(r)]
        self._filtered[group.pathname] = (params, unknowns, resids)

    def _check_path(self, path):
        """Return True if `path` should be recorded."""
        excludes = self.options['excludes']
        for pattern in self.options['includes']:
            if fnmatch(path, pattern):
                # Included; drop it again if any exclude pattern matches.
                return not any(fnmatch(path, ex) for ex in excludes)
        # Did not match anything in includes.
        return False

    def raw_record(self, params, unknowns, resids, metadata):
        """Filter the given vectors per includes/excludes, then call `record`.

        Drivers and solvers call this during execution to record their
        current state. Recorder subclasses should override `record`;
        altering this function should not be necessary.
        """
        # Coord looks like ['Driver', (1,), 'root', (1,), 'G1', (1,1), ...],
        # so the pathname is every other entry, starting with the fifth.
        pathname = '.'.join(metadata['coord'][4::2])
        pnames, unames, rnames = self._filtered[pathname]

        self.record({key: params[key] for key in pnames},
                    {key: unknowns[key] for key in unames},
                    {key: resids[key] for key in rnames},
                    metadata)

    def record(self, params, unknowns, resids, metadata):
        """Record the requested variables; subclasses must implement this."""
        raise NotImplementedError("record")

    def close(self):
        """Closes `out` unless it's ``sys.stdout``, ``sys.stderr``, or
        StringIO. Note that a closed recorder will do nothing in
        :meth:`record`."""
        # Closing a StringIO deletes its contents.
        if self.out not in (None, sys.stdout, sys.stderr):
            if not isinstance(self.out, StringIO):
                self.out.close()
            self.out = None
class BaseRecorder(object):
    """Base class for all case recorders."""

    def __init__(self):
        self.options = OptionsDictionary()
        self.options.add_option('includes', ['*'],
                                desc='Patterns for variables to include in recording')
        self.options.add_option('excludes', [],
                                desc='Patterns for variables to exclude from recording '
                                     '(processed after includes)')
        self.out = None

        # Tells drivers whether this recorder supports real parallel
        # recording (recording on each process). When False — the default —
        # the driver figures out what variables must be gathered to rank 0
        # when running under MPI. Subclasses that record on every process
        # should set this to True to avoid that unnecessary gathering.
        self._parallel = False

        # pathname -> (param names, unknown names, resid names)
        self._filtered = {}
        # TODO: System specific includes/excludes

    def startup(self, group):
        """Prepare for a new run.

        Args
        ----
        group : `Group`
            Group that owns this recorder.
        """
        # Compute the inclusion lists for recording.
        params = [p for p in group.params if self._check_path(p)]
        unknowns = [u for u in group.unknowns if self._check_path(u)]
        resids = [r for r in group.resids if self._check_path(r)]
        self._filtered[group.pathname] = (params, unknowns, resids)

    def _check_path(self, path):
        """Return True if `path` should be recorded."""
        excludes = self.options['excludes']
        for pattern in self.options['includes']:
            if fnmatch(path, pattern):
                # Included; drop it again if any exclude pattern matches.
                return not any(fnmatch(path, ex) for ex in excludes)
        # Did not match anything in includes.
        return False

    def _get_pathname(self, iteration_coordinate):
        """Convert an iteration coordinate into the key used to index
        `_filtered` when retrieving the names of variables to record."""
        return '.'.join(iteration_coordinate[4::2])

    def _filter_vectors(self, params, unknowns, resids, iteration_coordinate):
        """Return the subsets of `params`, `unknowns` and `resids` that
        should be recorded for this iteration coordinate."""
        pathname = self._get_pathname(iteration_coordinate)
        pnames, unames, rnames = self._filtered[pathname]

        return ({key: params[key] for key in pnames},
                {key: unknowns[key] for key in unames},
                {key: resids[key] for key in rnames})

    def record(self, params, unknowns, resids, metadata):
        """Records the requested variables. This method must be defined in
        all recorders.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        metadata : dict
            Dictionary containing execution metadata (e.g. iteration
            coordinate).
        """
        raise NotImplementedError("record")

    def close(self):
        """Closes `out` unless it's ``sys.stdout``, ``sys.stderr``, or
        StringIO. Note that a closed recorder will do nothing in
        :meth:`record`."""
        # Closing a StringIO deletes its contents.
        if self.out not in (None, sys.stdout, sys.stderr):
            if not isinstance(self.out, StringIO):
                self.out.close()
            self.out = None
class Driver(object):
    """ Base class for drivers in OpenMDAO. Drivers can only be placed in a
    Problem, and every problem has a Driver. Driver is the simplest driver that
    runs (solves using solve_nonlinear) a problem once.
    """

    def __init__(self):
        super(Driver, self).__init__()
        # List of BaseRecorder instances attached via add_recorder().
        self.recorders = []

        # What this driver supports
        self.supports = OptionsDictionary(read_only=True)
        self.supports.add_option('inequality_constraints', True)
        self.supports.add_option('equality_constraints', True)
        self.supports.add_option('linear_constraints', False)
        self.supports.add_option('multiple_objectives', False)
        self.supports.add_option('two_sided_constraints', False)
        self.supports.add_option('integer_parameters', False)

        # This driver's options
        self.options = OptionsDictionary()

        # Metadata dicts for the driver's parameters, objectives and
        # constraints, keyed by promoted variable name.
        self._params = OrderedDict()
        self._objs = OrderedDict()
        self._cons = OrderedDict()

        # Tuples of variable names grouped for parallel derivatives.
        self._voi_sets = []

        # We take root during setup
        self.root = None

        self.iter_count = 0

    def _setup(self, root):
        """ Updates metadata for params, constraints and objectives, and
        check for errors.

        Args
        ----
        root : `Group`
            Root system of the model; stored as ``self.root``.
        """
        self.root = root

        params = OrderedDict()
        objs = OrderedDict()
        cons = OrderedDict()

        item_tups = [('Parameter', self._params, params),
                     ('Objective', self._objs, objs),
                     ('Constraint', self._cons, cons)]

        for item_name, item, newitem in item_tups:
            for name, meta in iteritems(item):
                # NOTE(review): metadata(name) is fetched before the
                # membership check below, so an unknown name likely raises
                # a KeyError here instead of the intended ValueError —
                # consider reordering; verify against VecWrapper.metadata.
                rootmeta = root.unknowns.metadata(name)

                if MPI and 'src_indices' in rootmeta:
                    raise ValueError("'%s' is a distributed variable and may "
                                     "not be used as a parameter, objective, "
                                     "or constraint." % name)

                # Check validity of variable
                if name not in root.unknowns:
                    msg = "{} '{}' not found in unknowns."
                    msg = msg.format(item_name, name)
                    raise ValueError(msg)

                # Skip variables that live on a remote process.
                if rootmeta.get('remote'):
                    continue

                # Size is useful metadata to save
                if 'indices' in meta:
                    meta['size'] = len(meta['indices'])
                else:
                    meta['size'] = rootmeta['size']
                newitem[name] = meta

        self._params = params
        self._objs = objs
        self._cons = cons

    def _map_voi_indices(self):
        """ Gather the 'indices' metadata for all variables of interest.

        Returns
        -------
        tuple of (dict, dict)
            poi_indices maps param names to index arrays; qoi_indices maps
            constraint/objective names to index arrays.
        """
        poi_indices = {}
        qoi_indices = {}
        for name, meta in chain(iteritems(self._cons), iteritems(self._objs)):
            # set indices of interest
            if 'indices' in meta:
                qoi_indices[name] = meta['indices']

        for name, meta in iteritems(self._params):
            # set indices of interest
            if 'indices' in meta:
                poi_indices[name] = meta['indices']

        return poi_indices, qoi_indices

    def _of_interest(self, voi_list):
        """Return a list of tuples, with the given voi_list organized
        into tuples based on the previously defined grouping of VOIs.
        """
        vois = []
        done_sets = set()
        for v in voi_list:
            for voi_set in self._voi_sets:
                # NOTE(review): 'break' here skips the remaining voi_sets
                # for this variable; if v belongs to a set that comes after
                # an already-done set, v is silently dropped from the
                # result — looks like it should be 'continue'. TODO confirm.
                if voi_set in done_sets:
                    break
                if v in voi_set:
                    # Emit the whole group (restricted to requested names)
                    # the first time any of its members is seen.
                    vois.append(tuple([x for x in voi_set if x in voi_list]))
                    done_sets.add(voi_set)
                    break
            else:
                # Not in any group: stands alone as a singleton tuple.
                vois.append((v, ))
        return vois

    def params_of_interest(self):
        """
        Returns
        -------
        list of tuples of str
            The list of params, organized into tuples according to
            previously defined VOI groups.
        """
        return self._of_interest(self._params)

    def outputs_of_interest(self):
        """
        Returns
        -------
        list of tuples of str
            The list of constraints and objectives, organized into tuples
            according to previously defined VOI groups.
        """
        return self._of_interest(list(chain(self._objs, self._cons)))

    def parallel_derivs(self, vnames):
        """ Specifies that the named variables of interest are to be grouped
        together so that their derivatives can be solved for concurrently.

        Args
        ----
        vnames : iter of str
            The names of variables of interest that are to be grouped.
        """
        # A variable may belong to at most one VOI set.
        for grp in self._voi_sets:
            for vname in vnames:
                if vname in grp:
                    msg = "'%s' cannot be added to VOI set %s because it " + \
                          "already exists in VOI set: %s"
                    raise RuntimeError(msg % (vname, tuple(vnames), grp))

        # A set must be all params or all outputs, never a mixture.
        param_intsect = set(vnames).intersection(self._params.keys())
        if param_intsect and len(param_intsect) != len(vnames):
            raise RuntimeError("%s cannot be grouped because %s are params and %s are not." %
                               (vnames, list(param_intsect),
                                list(set(vnames).difference(param_intsect))))

        self._voi_sets.append(tuple(vnames))

    def add_recorder(self, recorder):
        """ Adds a recorder to the driver.

        Args
        ----
        recorder : BaseRecorder
            A recorder instance.
        """
        self.recorders.append(recorder)

    def add_param(self, name, low=None, high=None, indices=None, adder=0.0,
                  scaler=1.0):
        """ Adds a parameter to this driver.

        Args
        ----
        name : string
            Name of the paramcomp in the root system.

        low : float or ndarray, optional
            Lower boundary for the param

        high : upper or ndarray, optional
            Lower boundary for the param

        indices : iter of int, optional
            If a param is an array, these indicate which entries are of
            interest for derivatives.

        adder : float or ndarray, optional
            Value to add to the model value to get the scaled value. Adder
            is first in precedence.

        scaler : float or ndarray, optional
            value to multiply the model value to get the scaled value. Scaler
            is second in precedence.
        """
        # Unbounded sides get very large sentinel bounds.
        if low is None:
            low = -1e99
        elif isinstance(low, np.ndarray):
            low = low.flatten()

        if high is None:
            high = 1e99
        elif isinstance(high, np.ndarray):
            high = high.flatten()

        if isinstance(adder, np.ndarray):
            adder = adder.flatten()
        if isinstance(scaler, np.ndarray):
            scaler = scaler.flatten()

        # Scale the low and high values
        low = (low + adder) * scaler
        high = (high + adder) * scaler

        param = {}
        param['low'] = low
        param['high'] = high
        param['adder'] = adder
        param['scaler'] = scaler
        if indices:
            param['indices'] = np.array(indices, dtype=int)

        self._params[name] = param

    def get_params(self):
        """ Returns a dict of parameters.

        Returns
        -------
        dict
            Keys are the param object names, and the values are the param
            values. Values are scaled via (val + adder) * scaler.
        """
        uvec = self.root.unknowns
        params = OrderedDict()

        for key, meta in iteritems(self._params):
            scaler = meta['scaler']
            adder = meta['adder']
            flatval = uvec.flat[key]
            if 'indices' in meta:
                # Make sure our indices are valid
                try:
                    flatval = flatval[meta['indices']]
                except IndexError:
                    msg = "Index for parameter '{}' is out of bounds. "
                    msg += "Requested index: {}, "
                    msg += "Parameter shape: {}."
                    raise IndexError(msg.format(key, meta['indices'],
                                                uvec.metadata(key)['shape']))

            # Skip the arithmetic entirely when scaling is a no-op.
            if isinstance(scaler, np.ndarray) or isinstance(adder, np.ndarray) \
                    or scaler != 1.0 or adder != 0.0:
                params[key] = (flatval + adder) * scaler
            else:
                params[key] = flatval

        return params

    def get_param_metadata(self):
        """ Returns a dict of parameter metadata.

        Returns
        -------
        dict
            Keys are the param object names, and the values are the param
            values.
        """
        return self._params

    def set_param(self, name, value):
        """ Sets a parameter.

        Args
        ----
        name : string
            Name of the paramcomp in the root system.

        val : ndarray or float
            value to set the parameter
        """
        # Invert the driver scaling before writing into the model.
        scaler = self._params[name]['scaler']
        adder = self._params[name]['adder']
        if isinstance(scaler, np.ndarray) or isinstance(adder, np.ndarray) \
                or scaler != 1.0 or adder != 0.0:
            self.root.unknowns[name] = value / scaler - adder
        else:
            self.root.unknowns[name] = value

    def add_objective(self, name, indices=None, adder=0.0, scaler=1.0):
        """ Adds an objective to this driver.

        Args
        ----
        name : string
            Promoted pathname of the output that will serve as the objective.

        indices : iter of int, optional
            If an objective is an array, these indicate which entries are of
            interest for derivatives.

        adder : float or ndarray, optional
            Value to add to the model value to get the scaled value. Adder
            is first in precedence.

        scaler : float or ndarray, optional
            value to multiply the model value to get the scaled value. Scaler
            is second in precedence.
        """
        if isinstance(adder, np.ndarray):
            adder = adder.flatten()
        if isinstance(scaler, np.ndarray):
            scaler = scaler.flatten()

        obj = {}
        obj['adder'] = adder
        obj['scaler'] = scaler
        if indices:
            obj['indices'] = indices
            if len(indices) > 1 and not self.supports['multiple_objectives']:
                # NOTE(review): Driver defines no 'pathname' attribute, so
                # formatting self.pathname here would raise AttributeError
                # instead of this RuntimeError — TODO confirm and fix.
                raise RuntimeError("Multiple objective indices specified for "
                                   "variable '%s', but driver '%s' doesn't "
                                   "support multiple objectives." %
                                   (name, self.pathname))

        self._objs[name] = obj

    def get_objectives(self, return_type='dict'):
        """ Gets all objectives of this driver.

        Args
        ----
        return_type : string
            Set to 'dict' to return a dictionary, or set to 'array' to return a
            flat ndarray.

        Returns
        -------
        dict (for return_type 'dict')
            Key is the objective name string, value is an ndarray with the values.

        ndarray (for return_type 'array')
            Array containing all objective values in the order they were added.
        """
        # NOTE(review): the 'array' return_type described above is not
        # implemented here — a dict is always returned.
        uvec = self.root.unknowns
        objs = OrderedDict()

        for key, meta in iteritems(self._objs):
            scaler = meta['scaler']
            adder = meta['adder']
            flatval = uvec.flat[key]
            if 'indices' in meta:
                # Make sure our indices are valid
                try:
                    flatval = flatval[meta['indices']]
                except IndexError:
                    msg = "Index for objective '{}' is out of bounds. "
                    msg += "Requested index: {}, "
                    msg += "Parameter shape: {}."
                    raise IndexError(msg.format(key, meta['indices'],
                                                uvec.metadata(key)['shape']))

            if isinstance(scaler, np.ndarray) or isinstance(adder, np.ndarray) \
                    or adder != 0.0 or scaler != 1.0:
                objs[key] = (flatval + adder) * scaler
            else:
                objs[key] = flatval

        return objs

    def add_constraint(self, name, ctype='ineq', linear=False, jacs=None,
                       indices=None, adder=0.0, scaler=1.0):
        """ Adds a constraint to this driver.

        Args
        ----
        name : string
            Promoted pathname of the output that will serve as the objective.

        ctype : string
            Set to 'ineq' for inequality constraints, or 'eq' for equality
            constraints. Make sure your driver supports the ctype of
            constraint that you are adding.

        linear : bool, optional
            Set to True if this constraint is linear with respect to all params
            so that it can be calculated once and cached.

        jacs : dict of functions, optional
            Dictionary of user-defined functions that return the flattened
            Jacobian of this constraint with repsect to the params of
            this driver, as indicated by the dictionary keys. Default is None
            to let OpenMDAO calculate all derivatives. Note, this is currently
            unsupported

        indices : iter of int, optional
            If a constraint is an array, these indicate which entries are of
            interest for derivatives.

        adder : float or ndarray, optional
            Value to add to the model value to get the scaled value. Adder
            is first in precedence.

        scaler : float or ndarray, optional
            value to multiply the model value to get the scaled value. Scaler
            is second in precedence.
        """
        # Reject constraint kinds this driver declared it cannot handle.
        if ctype == 'eq' and self.supports['equality_constraints'] is False:
            msg = "Driver does not support equality constraint '{}'."
            raise RuntimeError(msg.format(name))
        if ctype == 'ineq' and self.supports['inequality_constraints'] is False:
            msg = "Driver does not support inequality constraint '{}'."
            raise RuntimeError(msg.format(name))

        if isinstance(adder, np.ndarray):
            adder = adder.flatten()
        if isinstance(scaler, np.ndarray):
            scaler = scaler.flatten()

        con = {}
        con['linear'] = linear
        con['ctype'] = ctype
        con['adder'] = adder
        con['scaler'] = scaler
        con['jacs'] = jacs
        if indices:
            con['indices'] = indices

        self._cons[name] = con

    def get_constraints(self, ctype='all', lintype='all'):
        """ Gets all constraints for this driver.

        Args
        ----
        ctype : string
            Default is 'all'. Optionally return just the inequality constraints
            with 'ineq' or the equality constraints with 'eq'.

        lintype : string
            Default is 'all'. Optionally return just the linear constraints
            with 'linear' or the nonlinear constraints with 'nonlinear'.

        Returns
        -------
        dict
            Key is the constraint name string, value is an ndarray with the values.
        """
        uvec = self.root.unknowns
        cons = OrderedDict()

        for key, meta in iteritems(self._cons):

            # Filter by linearity and constraint type as requested.
            if lintype == 'linear' and meta['linear'] == False:
                continue
            if lintype == 'nonlinear' and meta['linear']:
                continue
            if ctype == 'eq' and meta['ctype'] == 'ineq':
                continue
            if ctype == 'ineq' and meta['ctype'] == 'eq':
                continue

            scaler = meta['scaler']
            adder = meta['adder']
            flatval = uvec.flat[key]
            if 'indices' in meta:
                # Make sure our indices are valid
                try:
                    flatval = flatval[meta['indices']]
                except IndexError:
                    msg = "Index for constraint '{}' is out of bounds. "
                    msg += "Requested index: {}, "
                    msg += "Parameter shape: {}."
                    raise IndexError(msg.format(key, meta['indices'],
                                                uvec.metadata(key)['shape']))

            if isinstance(scaler, np.ndarray) or isinstance(adder, np.ndarray) \
                    or adder != 0.0 or scaler != 1.0:
                cons[key] = (flatval + adder) * scaler
            else:
                cons[key] = flatval

        return cons

    def get_constraint_metadata(self):
        """ Returns a dict of constraint metadata.

        Returns
        -------
        dict
            Keys are the constraint object names, and the values are the param
            values.
        """
        return self._cons

    def run(self, problem):
        """ Runs the driver. This function should be overriden when inheriting.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """
        system = problem.root

        # Metadata Setup
        self.iter_count += 1
        metadata = create_local_meta(None, 'Driver')
        system.ln_solver.local_meta = metadata
        update_local_meta(metadata, (self.iter_count, ))

        # Solve the system once and record results.
        system.solve_nonlinear(metadata=metadata)

        for recorder in self.recorders:
            recorder.raw_record(system.params, system.unknowns,
                                system.resids, metadata)
class MySimpleDriver(Driver):
    """ Toy steepest-descent driver used for testing.

    Each iteration it solves the model, takes a fixed-size gradient step on
    every objective with respect to every parameter, and records which
    constraints are currently violated.
    """

    def __init__(self):
        super(MySimpleDriver, self).__init__()

        # What we support
        self.supports['inequality_constraints'] = True
        self.supports['equality_constraints'] = False
        self.supports['linear_constraints'] = False
        self.supports['multiple_objectives'] = False

        # My driver options
        self.options = OptionsDictionary()
        self.options.add_option('tol', 1e-4)
        self.options.add_option('maxiter', 10)

        # Fixed step length for the descent update.
        self.alpha = .01

        # Names of constraints violated at the most recent iteration.
        self.violated = []

    def run(self, problem):
        """ Mimic a very simplistic unconstrained optimization."""
        # Snapshot of the current design point and responses.
        params = self.get_params()
        objective = self.get_objectives()
        constraints = self.get_constraints()

        dv_names = params.keys()
        obj_names = list(objective.keys())
        con_names = list(constraints.keys())
        resp_names = obj_names + con_names

        for _ in range(self.options['maxiter']):

            # Evaluate the model at the current point.
            problem.root.solve_nonlinear()

            # Gradient of every response w.r.t. every parameter.
            J = problem.calc_gradient(dv_names, resp_names,
                                      return_format='dict')

            objective = self.get_objectives()
            constraints = self.get_constraints()

            # Steepest-descent update, scaled by the objective value.
            for obj_name in obj_names:
                for dv_name in dv_names:
                    step = self.alpha * (J[obj_name][dv_name] *
                                         objective[obj_name])
                    self.set_param(dv_name, params[dv_name] - step)

            # Track which constraints the new point violates.
            self.violated = [name for name, val in constraints.items()
                             if np.linalg.norm(val) > 0.0]
class SolverBase(object):
    """ Common base class for Linear and Nonlinear solver. Should not be used
    by users. Always inherit from `LinearSolver` or `NonlinearSolver`."""

    def __init__(self):
        # Number of iterations executed so far.
        self.iter_count = 0
        self.options = OptionsDictionary()
        desc = 'Set to 0 to disable printing, set to 1 to print the ' \
               'residual to stdout each iteration, set to 2 to print ' \
               'subiteration residuals as well.'
        self.options.add_option('iprint', 0, values=[0, 1, 2], desc=desc)
        # Recorders attached to this solver.
        self.recorders = []
        # Execution metadata set by the owning system during a run.
        self.local_meta = None

    def setup(self, sub):
        """ Solvers override to define post-setup initiailzation.

        Args
        ----
        sub: `System`
            System that owns this solver.
        """
        pass

    def print_norm(self, solver_string, metadata, iteration, res, res0,
                   msg=None, indent=0, solver='NL'):
        """ Prints out the norm of the residual in a neat readable format.

        Args
        ----
        solver_string: string
            Unique string to identify your solver type (e.g., 'LN_GS' or
            'NEWTON').

        metadata: dict
            OpenMDAO execution metadata containing iteration info.

        iteration: int
            Current iteration number

        res: float
            Absolute residual value.

        res0: float
            Baseline initial residual for relative comparison.

        msg: string, optional
            Message that indicates convergence.

        indent: int, optional
            Additional indentation levels for subiterations.

        solver: string, optional
            Solver type if not LN or NL (mostly for line search operations.)
        """
        name = metadata['name']

        # Find indentation level: iteration tuples in the coordinate mark
        # nesting depth; string entries are system names, not levels.
        level = sum(len(item) for item in metadata['coord']
                    if not isinstance(item, str))
        # No indentation for driver; top solver is no indentation.
        level = level + indent - 2

        indent = '  ' * level
        if msg is not None:
            form = indent + '[%s] %s: %s   %d | %s'
            print(form % (name, solver, solver_string, iteration, msg))
            return

        form = indent + '[%s] %s: %s   %d | %.9g %.9g'
        # Second number is the residual relative to the baseline res0.
        print(form % (name, solver, solver_string, iteration, res, res / res0))
class SolverBase(object):
    """ Shared base for the linear and nonlinear solver hierarchies. Not for
    direct use; derive from `LinearSolver` or `NonlinearSolver` instead."""

    def __init__(self):
        self.iter_count = 0

        self.options = OptionsDictionary()
        desc = 'Set to 0 to disable printing, set to 1 to print the ' \
               'residual to stdout each iteration, set to 2 to print ' \
               'subiteration residuals as well.'
        self.options.add_option('iprint', 0, values=[0, 1, 2], desc=desc)

        self.recorders = []
        self.local_meta = None

    def setup(self, sub):
        """ Hook for subclasses needing post-setup initialization.

        Args
        ----
        sub: `System`
            System that owns this solver.
        """
        pass

    def print_norm(self, solver_string, metadata, iteration, res, res0,
                   msg=None, indent=0, solver='NL'):
        """ Prints out the norm of the residual in a neat readable format.

        Args
        ----
        solver_string: string
            Unique string to identify your solver type (e.g., 'LN_GS' or
            'NEWTON').

        metadata: dict
            OpenMDAO execution metadata containing iteration info.

        iteration: int
            Current iteration number

        res: float
            Absolute residual value.

        res0: float
            Baseline initial residual for relative comparison.

        msg: string, optional
            Message that indicates convergence.

        indent: int, optional
            Additional indentation levels for subiterations.

        solver: string, optional
            Solver type if not LN or NL (mostly for line search operations.)
        """
        name = metadata['name']

        # Nesting depth: count entries of the iteration tuples in the
        # coordinate; the string entries are system names, not levels.
        depth = sum(len(entry) for entry in metadata['coord']
                    if not isinstance(entry, str))

        # The driver contributes no indentation and the top solver prints
        # flush left, hence the -2 offset.
        depth = depth + indent - 2
        pad = ' ' * depth

        if msg is not None:
            template = pad + '[%s] %s: %s   %d | %s'
            print(template % (name, solver, solver_string, iteration, msg))
        else:
            template = pad + '[%s] %s: %s   %d | %.9g %.9g'
            print(template % (name, solver, solver_string, iteration,
                              res, res / res0))
class Driver(object):
    """ Base class for drivers in OpenMDAO. Drivers can only be placed in a
    Problem, and every problem has a Driver. Driver is the simplest driver that
    runs (solves using solve_nonlinear) a problem once.
    """

    def __init__(self):
        super(Driver, self).__init__()
        # RecordingManager fans record() calls out to attached recorders.
        self.recorders = RecordingManager()

        # What this driver supports
        self.supports = OptionsDictionary(read_only=True)
        self.supports.add_option("inequality_constraints", True)
        self.supports.add_option("equality_constraints", True)
        self.supports.add_option("linear_constraints", True)
        self.supports.add_option("multiple_objectives", True)
        self.supports.add_option("two_sided_constraints", True)
        self.supports.add_option("integer_design_vars", True)

        # This driver's options
        self.options = OptionsDictionary()

        # Metadata for design vars, objectives and constraints, keyed by
        # promoted variable name.
        self._desvars = OrderedDict()
        self._objs = OrderedDict()
        self._cons = OrderedDict()

        # Tuples of variable names grouped for parallel derivatives.
        self._voi_sets = []
        self._vars_to_record = None

        # We take root during setup
        self.root = None

        self.iter_count = 0

    def _setup(self, root):
        """ Updates metadata for design vars, constraints and objectives, and
        checks for errors. Also determines all variables that need to
        be gathered for case recording.

        Args
        ----
        root : `Group`
            Root system of the model; stored as ``self.root``.
        """
        self.root = root

        desvars = OrderedDict()
        objs = OrderedDict()
        cons = OrderedDict()

        item_tups = [
            ("Parameter", self._desvars, desvars),
            ("Objective", self._objs, objs),
            ("Constraint", self._cons, cons),
        ]

        for item_name, item, newitem in item_tups:
            for name, meta in iteritems(item):

                # Check validity of variable BEFORE asking for its metadata,
                # so an unknown name raises the intended ValueError rather
                # than a KeyError from the metadata lookup.
                if name not in root.unknowns:
                    msg = "{} '{}' not found in unknowns."
                    msg = msg.format(item_name, name)
                    raise ValueError(msg)

                rootmeta = root.unknowns.metadata(name)

                if MPI and "src_indices" in rootmeta:  # pragma: no cover
                    raise ValueError(
                        "'%s' is a distributed variable and may "
                        "not be used as a design var, objective, "
                        "or constraint." % name
                    )

                # Size is useful metadata to save
                if "indices" in meta:
                    meta["size"] = len(meta["indices"])
                else:
                    meta["size"] = rootmeta["size"]
                newitem[name] = meta

        self._desvars = desvars
        self._objs = objs
        self._cons = cons

    def _map_voi_indices(self):
        """ Gather the 'indices' metadata for all variables of interest.

        Returns
        -------
        tuple of (dict, dict)
            poi_indices maps design var names to index arrays; qoi_indices
            maps constraint/objective names to index arrays.
        """
        poi_indices = {}
        qoi_indices = {}
        for name, meta in chain(iteritems(self._cons), iteritems(self._objs)):
            # set indices of interest
            if "indices" in meta:
                qoi_indices[name] = meta["indices"]

        for name, meta in iteritems(self._desvars):
            # set indices of interest
            if "indices" in meta:
                poi_indices[name] = meta["indices"]

        return poi_indices, qoi_indices

    def _of_interest(self, voi_list):
        """Return a list of tuples, with the given voi_list organized
        into tuples based on the previously defined grouping of VOIs.
        """
        # One bucket per declared VOI set, in declaration order.
        buckets = [[] for _ in self._voi_sets]
        remaining = set(voi_list)

        for i, voi_set in enumerate(self._voi_sets):
            for v in voi_list:
                if v in voi_set:
                    buckets[i].append(v)
                    remaining.remove(v)

        # Only emit groups that actually matched something.
        vois = [tuple(x) for x in buckets if x]

        # Ungrouped variables become singleton tuples, preserving order.
        for v in voi_list:
            if v in remaining:
                vois.append((v,))

        return vois

    def desvars_of_interest(self):
        """
        Returns
        -------
        list of tuples of str
            The list of design vars, organized into tuples according to
            previously defined VOI groups.
        """
        return self._of_interest(self._desvars)

    def outputs_of_interest(self):
        """
        Returns
        -------
        list of tuples of str
            The list of constraints and objectives, organized into tuples
            according to previously defined VOI groups.
        """
        return self._of_interest(list(chain(self._objs, self._cons)))

    def parallel_derivs(self, vnames):
        """ Specifies that the named variables of interest are to be grouped
        together so that their derivatives can be solved for concurrently.

        Args
        ----
        vnames : iter of str
            The names of variables of interest that are to be grouped.

        Raises
        ------
        RuntimeError
            If a name is not a declared VOI, is already in another group, or
            if the group mixes design vars with objectives/constraints.
        """
        # make sure all vnames are desvars, constraints, or objectives
        for n in vnames:
            if not (n in self._desvars or n in self._objs or n in self._cons):
                raise RuntimeError("'%s' is not a param, objective, or "
                                   "constraint" % n)

        # A variable may belong to at most one VOI set.
        for grp in self._voi_sets:
            for vname in vnames:
                if vname in grp:
                    msg = "'%s' cannot be added to VOI set %s because it " + \
                          "already exists in VOI set: %s"
                    raise RuntimeError(msg % (vname, tuple(vnames), grp))

        # A set must be all design vars or all outputs, never a mixture.
        param_intsect = set(vnames).intersection(self._desvars.keys())
        if param_intsect and len(param_intsect) != len(vnames):
            raise RuntimeError(
                "%s cannot be grouped because %s are design "
                "vars and %s are not."
                % (vnames, list(param_intsect),
                   list(set(vnames).difference(param_intsect)))
            )

        if MPI:  # pragma: no cover
            self._voi_sets.append(tuple(vnames))
        else:
            # Fix: the original message had a %s placeholder but no
            # formatting argument, so the names never appeared.
            warnings.warn("parallel derivs %s specified but not running "
                          "under MPI" % (vnames,))

    def add_recorder(self, recorder):
        """ Adds a recorder to the driver.

        Args
        ----
        recorder : BaseRecorder
            A recorder instance.
        """
        self.recorders.append(recorder)

    def add_desvar(self, name, low=None, high=None, indices=None, adder=0.0,
                   scaler=1.0):
        """ Adds a design variable to this driver.

        Args
        ----
        name : string
            Name of the IndepVarComp in the root system.

        low : float or ndarray, optional
            Lower boundary for the param

        high : upper or ndarray, optional
            Upper boundary for the param

        indices : iter of int, optional
            If a param is an array, these indicate which entries are of
            interest for derivatives.

        adder : float or ndarray, optional
            Value to add to the model value to get the scaled value. Adder
            is first in precedence.

        scaler : float or ndarray, optional
            value to multiply the model value to get the scaled value. Scaler
            is second in precedence.
        """
        # Unbounded sides get very large sentinel bounds.
        if low is None:
            low = -1e99
        elif isinstance(low, np.ndarray):
            low = low.flatten()

        if high is None:
            high = 1e99
        elif isinstance(high, np.ndarray):
            high = high.flatten()

        if isinstance(adder, np.ndarray):
            adder = adder.flatten()
        if isinstance(scaler, np.ndarray):
            scaler = scaler.flatten()

        # Scale the low and high values
        low = (low + adder) * scaler
        high = (high + adder) * scaler

        param = {}
        param["low"] = low
        param["high"] = high
        param["adder"] = adder
        param["scaler"] = scaler
        if indices:
            param["indices"] = np.array(indices, dtype=int)

        self._desvars[name] = param

    def add_param(self, name, low=None, high=None, indices=None, adder=0.0,
                  scaler=1.0):
        """ Deprecated. Use ``add_desvar`` instead. """
        warnings.simplefilter("always", DeprecationWarning)
        warnings.warn("Driver.add_param() is deprecated. Use add_desvar() instead.",
                      DeprecationWarning, stacklevel=2)
        warnings.simplefilter("ignore", DeprecationWarning)

        self.add_desvar(name, low=low, high=high, indices=indices, adder=adder,
                        scaler=scaler)

    def get_desvars(self):
        """ Returns a dict of possibly distributed design variables.

        Returns
        -------
        dict
            Keys are the param object names, and the values are the param
            values (scaled via (val + adder) * scaler).
        """
        desvars = OrderedDict()

        for key, meta in iteritems(self._desvars):
            desvars[key] = self._get_distrib_var(key, meta, "design var")

        return desvars

    def _get_distrib_var(self, name, meta, voi_type):
        """ Fetch (and broadcast, under MPI) one variable's value, applying
        index selection and driver scaling.

        Args
        ----
        name : string
            Promoted variable name.

        meta : dict
            Driver metadata for the variable (scaler/adder/indices).

        voi_type : string
            Label ('design var', 'objective', 'constraint') used in error
            messages.

        Returns
        -------
        ndarray or float
            The (possibly scaled, possibly index-sliced) value.
        """
        uvec = self.root.unknowns
        comm = self.root.comm
        nproc = comm.size
        iproc = comm.rank

        if nproc > 1:
            # Only the owning rank holds the value; everyone else gets it
            # via the broadcast below.
            owner = self.root._owning_ranks[name]
            if iproc == owner:
                flatval = uvec.flat[name]
            else:
                flatval = None
        else:
            owner = 0
            flatval = uvec.flat[name]

        if "indices" in meta and not (nproc > 1 and owner != iproc):
            # Make sure our indices are valid
            try:
                flatval = flatval[meta["indices"]]
            except IndexError:
                msg = "Index for {} '{}' is out of bounds. "
                msg += "Requested index: {}, "
                msg += "shape: {}."
                raise IndexError(msg.format(voi_type, name, meta["indices"],
                                            uvec.metadata(name)["shape"]))

        if nproc > 1:
            flatval = comm.bcast(flatval, root=owner)

        scaler = meta["scaler"]
        adder = meta["adder"]

        # Skip the arithmetic entirely when scaling is a no-op.
        if isinstance(scaler, np.ndarray) or isinstance(adder, np.ndarray) \
                or scaler != 1.0 or adder != 0.0:
            return (flatval + adder) * scaler
        else:
            return flatval

    def get_desvar_metadata(self):
        """ Returns a dict of design variable metadata.

        Returns
        -------
        dict
            Keys are the param object names, and the values are the param
            values.
        """
        return self._desvars

    def set_desvar(self, name, value):
        """ Sets a design variable.

        Args
        ----
        name : string
            Name of the IndepVarComp in the root system.

        val : ndarray or float
            value to set the design variable
        """
        # Nothing to set locally for zero-size (remote) variables.
        if self.root.unknowns.flat[name].size == 0:
            return

        # Invert the driver scaling before writing into the model.
        scaler = self._desvars[name]["scaler"]
        adder = self._desvars[name]["adder"]
        if isinstance(scaler, np.ndarray) or isinstance(adder, np.ndarray) \
                or scaler != 1.0 or adder != 0.0:
            value = value / scaler - adder

        # Only set the indices we requested when we set the parameter.
        idx = self._desvars[name].get("indices")
        if idx is not None:
            self.root.unknowns[name][idx] = value
        else:
            self.root.unknowns[name] = value

    def add_objective(self, name, indices=None, adder=0.0, scaler=1.0):
        """ Adds an objective to this driver.

        Args
        ----
        name : string
            Promoted pathname of the output that will serve as the objective.

        indices : iter of int, optional
            If an objective is an array, these indicate which entries are of
            interest for derivatives.

        adder : float or ndarray, optional
            Value to add to the model value to get the scaled value. Adder
            is first in precedence.

        scaler : float or ndarray, optional
            value to multiply the model value to get the scaled value. Scaler
            is second in precedence.

        Raises
        ------
        RuntimeError
            If multiple indices are given but the driver does not support
            multiple objectives.
        """
        if isinstance(adder, np.ndarray):
            adder = adder.flatten()
        if isinstance(scaler, np.ndarray):
            scaler = scaler.flatten()

        obj = {}
        obj["adder"] = adder
        obj["scaler"] = scaler
        if indices:
            obj["indices"] = indices
            if len(indices) > 1 and not self.supports["multiple_objectives"]:
                # Fix: Driver has no 'pathname' attribute; formatting
                # self.pathname raised AttributeError instead of this
                # RuntimeError. Use the class name to identify the driver.
                raise RuntimeError(
                    "Multiple objective indices specified for "
                    "variable '%s', but driver '%s' doesn't "
                    "support multiple objectives." % (name, type(self).__name__)
                )

        self._objs[name] = obj

    def get_objectives(self, return_type="dict"):
        """ Gets all objectives of this driver.

        Args
        ----
        return_type : string
            Set to 'dict' to return a dictionary, or set to 'array' to return a
            flat ndarray.

        Returns
        -------
        dict (for return_type 'dict')
            Key is the objective name string, value is an ndarray with the
            values.

        ndarray (for return_type 'array')
            Array containing all objective values in the order they were added.
        """
        objs = OrderedDict()

        for key, meta in iteritems(self._objs):
            objs[key] = self._get_distrib_var(key, meta, "objective")

        return objs

    def add_constraint(self, name, lower=None, upper=None, equals=None,
                       linear=False, jacs=None, indices=None, adder=0.0,
                       scaler=1.0):
        """ Adds a constraint to this driver. For inequality constraints,
        `lower` or `upper` must be specified. For equality constraints,
        `equals` must be specified.

        Args
        ----
        name : string
            Promoted pathname of the output that will serve as the quantity to
            constrain.

        lower : float or ndarray, optional
            Constrain the quantity to be greater than this value.

        upper : float or ndarray, optional
            Constrain the quantity to be less than this value.

        equals : float or ndarray, optional
            Constrain the quantity to be equal to this value.

        linear : bool, optional
            Set to True if this constraint is linear with respect to all design
            variables so that it can be calculated once and cached.

        jacs : dict of functions, optional
            Dictionary of user-defined functions that return the flattened
            Jacobian of this constraint with repsect to the design vars of
            this driver, as indicated by the dictionary keys. Default is None
            to let OpenMDAO calculate all derivatives. Note, this is currently
            unsupported

        indices : iter of int, optional
            If a constraint is an array, these indicate which entries are of
            interest for derivatives.

        adder : float or ndarray, optional
            Value to add to the model value to get the scaled value. Adder
            is first in precedence.

        scaler : float or ndarray, optional
            value to multiply the model value to get the scaled value. Scaler
            is second in precedence.

        Raises
        ------
        RuntimeError
            If the bound specification is inconsistent or the driver does not
            support the requested constraint kind.
        """
        # A constraint is either equality (equals) or inequality
        # (lower/upper) — never both, and never neither.
        if equals is not None and (lower is not None or upper is not None):
            msg = "Constraint '{}' cannot be both equality and inequality."
            raise RuntimeError(msg.format(name))

        if equals is not None and self.supports["equality_constraints"] is False:
            msg = "Driver does not support equality constraint '{}'."
            raise RuntimeError(msg.format(name))
        if equals is None and self.supports["inequality_constraints"] is False:
            msg = "Driver does not support inequality constraint '{}'."
            raise RuntimeError(msg.format(name))
        if lower is not None and upper is not None and \
                self.supports["two_sided_constraints"] is False:
            msg = "Driver does not support 2-sided constraint '{}'."
            raise RuntimeError(msg.format(name))
        if lower is None and upper is None and equals is None:
            msg = "Constraint '{}' needs to define lower, upper, or equals."
            raise RuntimeError(msg.format(name))

        if isinstance(scaler, np.ndarray):
            scaler = scaler.flatten()
        if isinstance(adder, np.ndarray):
            adder = adder.flatten()
        if isinstance(lower, np.ndarray):
            lower = lower.flatten()
        if isinstance(upper, np.ndarray):
            upper = upper.flatten()
        if isinstance(equals, np.ndarray):
            equals = equals.flatten()

        con = {}
        con["lower"] = lower
        con["upper"] = upper
        con["equals"] = equals
        con["linear"] = linear
        con["adder"] = adder
        con["scaler"] = scaler
        con["jacs"] = jacs
        if indices:
            con["indices"] = indices

        self._cons[name] = con

    def get_constraints(self, ctype="all", lintype="all"):
        """ Gets all constraints for this driver.

        Args
        ----
        ctype : string
            Default is 'all'. Optionally return just the inequality constraints
            with 'ineq' or the equality constraints with 'eq'.

        lintype : string
            Default is 'all'. Optionally return just the linear constraints
            with 'linear' or the nonlinear constraints with 'nonlinear'.

        Returns
        -------
        dict
            Key is the constraint name string, value is an ndarray with the
            values.
        """
        cons = OrderedDict()

        for key, meta in iteritems(self._cons):

            # Filter by linearity and by equality/inequality as requested;
            # a constraint with 'equals' set is an equality constraint.
            if lintype == "linear" and not meta["linear"]:
                continue
            if lintype == "nonlinear" and meta["linear"]:
                continue
            if ctype == "eq" and meta["equals"] is None:
                continue
            if ctype == "ineq" and meta["equals"] is not None:
                continue

            cons[key] = self._get_distrib_var(key, meta, "constraint")

        return cons

    def get_constraint_metadata(self):
        """ Returns a dict of constraint metadata.

        Returns
        -------
        dict
            Keys are the constraint object names, and the values are the param
            values.
        """
        return self._cons

    def run(self, problem):
        """ Runs the driver. This function should be overriden when inheriting.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """
        system = problem.root

        # Metadata Setup
        self.iter_count += 1
        metadata = create_local_meta(None, "Driver")
        system.ln_solver.local_meta = metadata
        update_local_meta(metadata, (self.iter_count,))

        # Solve the system once and record results.
        system.solve_nonlinear(metadata=metadata)

        self.recorders.record(system, metadata)

    def generate_docstring(self):
        """ Generates a numpy-style docstring for a user-created Driver class.

        Returns
        -------
        docstring : str
            string that contains a basic numpy docstring.
        """
        # start the docstring off
        docstring = '    """\n'

        # Put options into docstring
        from openmdao.core.options import OptionsDictionary
        firstTime = 1
        # for py3.4, items from vars must come out in same order.
        v = OrderedDict(sorted(vars(self).items()))
        for key, value in v.items():
            # Exact type match (not isinstance) is intentional here.
            if type(value) == OptionsDictionary:
                if key == "supports":
                    continue
                if firstTime:
                    # start of Options docstring
                    docstring += "\n    Options\n    -------\n"
                    firstTime = 0
                for (name, val) in sorted(value.items()):
                    docstring += "    " + key + "['"
                    docstring += name + "']"
                    docstring += " : " + type(val).__name__
                    docstring += "("
                    if type(val).__name__ == "str":
                        docstring += "'"
                    docstring += str(val)
                    if type(val).__name__ == "str":
                        docstring += "'"
                    docstring += ")\n"

                    desc = value._options[name]["desc"]
                    if desc:
                        docstring += "        " + desc + "\n"

        # finish up docstring
        docstring += '\n    """\n'
        return docstring
class MySimpleDriver(Driver):
    """ Toy driver that mimics a bare-bones, unconstrained gradient descent.

    Intended for demonstration/testing; not a production optimizer.
    """

    def __init__(self):
        super(MySimpleDriver, self).__init__()

        # Declare which constraint flavors this driver can handle.
        self.supports['inequality_constraints'] = True
        self.supports['equality_constraints'] = False
        self.supports['linear_constraints'] = False
        self.supports['multiple_objectives'] = False

        # Driver-specific options.
        self.options = OptionsDictionary()
        self.options.add_option('tol', 1e-4)
        self.options.add_option('maxiter', 10)

        self.alpha = .01      # fixed step size for the descent update
        self.violated = []    # names of constraints violated at last iteration

    def run(self, problem):
        """ Mimic a very simplistic unconstrained optimization."""

        # Dicts keyed by variable name, pointing at the current vectors.
        params = self.get_desvars()
        objective = self.get_objectives()
        constraints = self.get_constraints()

        indep_list = params.keys()
        objective_names = list(objective.keys())
        constraint_names = list(constraints.keys())
        unknown_list = objective_names + constraint_names

        for _ in range(self.options['maxiter']):

            # Evaluate the model at the current design point.
            problem.root.solve_nonlinear()

            # Derivatives of objectives/constraints w.r.t. the design vars.
            J = problem.calc_gradient(indep_list, unknown_list,
                                      return_format='dict')

            objective = self.get_objectives()
            constraints = self.get_constraints()

            # Plain gradient-descent step on each design variable.
            for obj_name in objective_names:
                for dv_name in indep_list:
                    step = J[obj_name][dv_name] * objective[obj_name]
                    self.set_desvar(dv_name, params[dv_name] - self.alpha*step)

            # Track which constraints are currently violated.
            self.violated = [name for name, val in constraints.items()
                             if np.linalg.norm(val) > 0.0]
class Driver(object):
    """ Base class for drivers in OpenMDAO. Drivers can only be placed in a
    Problem, and every problem has a Driver. Driver is the simplest driver that
    runs (solves using solve_nonlinear) a problem once.
    """

    def __init__(self):
        super(Driver, self).__init__()
        self.recorders = []

        # What this driver supports
        self.supports = OptionsDictionary(read_only=True)
        self.supports.add_option('inequality_constraints', True)
        self.supports.add_option('equality_constraints', True)
        self.supports.add_option('linear_constraints', False)
        self.supports.add_option('multiple_objectives', False)
        self.supports.add_option('two_sided_constraints', False)
        self.supports.add_option('integer_parameters', False)

        # This driver's options
        self.options = OptionsDictionary()

        self._params = OrderedDict()
        self._objs = OrderedDict()
        self._cons = OrderedDict()

        self._voi_sets = []

        # We take root during setup
        self.root = None

        self.iter_count = 0

    def _setup(self, root):
        """ Prepares some things we need."""
        self.root = root

        item_names = ['Parameter', 'Objective', 'Constraint']
        items = [self._params, self._objs, self._cons]

        for item, item_name in zip(items, item_names):
            for name, meta in item.items():

                # Check validity of variable
                if name not in root.unknowns:
                    msg = "{} '{}' not found in unknowns."
                    msg = msg.format(item_name, name)
                    raise ValueError(msg)

                # Size is useful metadata to save
                if 'indices' in meta:
                    meta['size'] = len(meta['indices'])
                else:
                    meta['size'] = root.unknowns.metadata(name)['size']

    def _map_voi_indices(self):
        """ Returns (poi_indices, qoi_indices): dicts mapping variable names
        to their 'indices' metadata, for params and for
        objectives/constraints respectively."""
        poi_indices = {}
        qoi_indices = {}
        for name, meta in chain(self._cons.items(), self._objs.items()):
            # set indices of interest
            if 'indices' in meta:
                qoi_indices[name] = meta['indices']

        for name, meta in self._params.items():
            # set indices of interest
            if 'indices' in meta:
                poi_indices[name] = meta['indices']

        return poi_indices, qoi_indices

    def _of_interest(self, voi_list):
        """Return a list of tuples, with the given voi_list organized
        into tuples based on the previously defined grouping of VOIs.
        """
        vois = []
        done_sets = set()
        for v in voi_list:
            for voi_set in self._voi_sets:
                if v in voi_set:
                    # Emit each parallel-deriv group only once, trimmed to
                    # the members that actually appear in voi_list.
                    # BUGFIX: the old code broke out of the scan as soon as
                    # it hit an already-emitted group, which silently dropped
                    # any ungrouped variable that followed one.
                    if voi_set not in done_sets:
                        vois.append(tuple([x for x in voi_set
                                           if x in voi_list]))
                        done_sets.add(voi_set)
                    break
            else:
                # Not a member of any group; it stands alone.
                vois.append((v,))
        return vois

    def params_of_interest(self):
        """
        Returns
        -------
        list of tuples of str
            The list of params, organized into tuples according to previously
            defined VOI groups.
        """
        return self._of_interest(self._params)

    def outputs_of_interest(self):
        """
        Returns
        -------
        list of tuples of str
            The list of constraints and objectives, organized into tuples
            according to previously defined VOI groups.
        """
        return self._of_interest(list(chain(self._objs, self._cons)))

    def parallel_derivs(self, vnames):
        """ Specifies that the named variables of interest are to be grouped
        together so that their derivatives can be solved for concurrently.

        Args
        ----
        vnames : iter of str
            The names of variables of interest that are to be grouped.
        """
        # A variable may belong to at most one VOI set.
        for grp in self._voi_sets:
            for vname in vnames:
                if vname in grp:
                    msg = "'%s' cannot be added to VOI set %s because it " + \
                          "already exists in VOI set: %s"
                    raise RuntimeError(msg % (vname, tuple(vnames), grp))

        # A group must be all params or no params; a mix cannot be solved
        # for concurrently.
        param_intsect = set(vnames).intersection(self._params.keys())
        if param_intsect and len(param_intsect) != len(vnames):
            raise RuntimeError("%s cannot be grouped because %s are params and %s are not." %
                               (vnames, list(param_intsect),
                                list(set(vnames).difference(param_intsect))))
        self._voi_sets.append(tuple(vnames))

    def add_recorder(self, recorder):
        """ Adds a recorder to the driver.

        Args
        ----
        recorder : BaseRecorder
            A recorder instance.
        """
        self.recorders.append(recorder)

    def add_param(self, name, low=None, high=None, indices=None, adder=0.0,
                  scaler=1.0):
        """ Adds a parameter to this driver.

        Args
        ----
        name : string
            Name of the paramcomp in the root system.

        low : float or ndarray, optional
            Lower boundary for the param

        high : float or ndarray, optional
            Upper boundary for the param

        indices : iter of int, optional
            If a param is an array, these indicate which entries are of
            interest for derivatives.

        adder : float or ndarray, optional
            Value to add to the model value to get the scaled value. Adder
            is first in precedence.

        scaler : float or ndarray, optional
            value to multiply the model value to get the scaled value. Scaler
            is second in precedence.
        """
        # Unbounded sides get very large sentinels so downstream optimizers
        # always see finite bounds.
        if low is None:
            low = -1e99
        elif isinstance(low, np.ndarray):
            low = low.flatten()

        if high is None:
            high = 1e99
        elif isinstance(high, np.ndarray):
            high = high.flatten()

        if isinstance(adder, np.ndarray):
            adder = adder.flatten()
        if isinstance(scaler, np.ndarray):
            scaler = scaler.flatten()

        # Scale the low and high values
        low = (low + adder)*scaler
        high = (high + adder)*scaler

        param = {}
        param['low'] = low
        param['high'] = high
        param['adder'] = adder
        param['scaler'] = scaler
        if indices:
            param['indices'] = np.array(indices, dtype=int)

        self._params[name] = param

    def get_params(self):
        """ Returns a dict of parameters.

        Returns
        -------
        dict
            Keys are the param object names, and the values are the param
            values.
        """
        uvec = self.root.unknowns
        params = OrderedDict()

        for key, meta in self._params.items():
            scaler = meta['scaler']
            adder = meta['adder']
            flatval = uvec.flat[key]
            if 'indices' in meta:
                # Make sure our indices are valid
                try:
                    flatval = flatval[meta['indices']]
                except IndexError:
                    msg = "Index for parameter '{}' is out of bounds. "
                    msg += "Requested index: {}, "
                    msg += "Parameter shape: {}."
                    raise IndexError(msg.format(key, meta['indices'],
                                                uvec.metadata(key)['shape']))

            # ndarray checks must short-circuit first: comparing an array
            # with != yields an array, not a bool.
            if isinstance(scaler, np.ndarray) or isinstance(adder, np.ndarray) \
                    or scaler != 1.0 or adder != 0.0:
                params[key] = (flatval + adder)*scaler
            else:
                params[key] = flatval

        return params

    def get_param_metadata(self):
        """ Returns a dict of parameter metadata.

        Returns
        -------
        dict
            Keys are the param object names, and the values are the param
            values.
        """
        return self._params

    def set_param(self, name, value):
        """ Sets a parameter.

        Args
        ----
        name : string
            Name of the paramcomp in the root system.

        value : ndarray or float
            value to set the parameter
        """
        scaler = self._params[name]['scaler']
        adder = self._params[name]['adder']
        # BUGFIX: the defaults were compared the wrong way around
        # (scaler != 0.0 / adder != 1.0). Unscale only when scaling is
        # actually in effect, mirroring get_params().
        if isinstance(scaler, np.ndarray) or isinstance(adder, np.ndarray) \
                or scaler != 1.0 or adder != 0.0:
            # Inverse of the (value + adder)*scaler transform.
            self.root.unknowns[name] = value/scaler - adder
        else:
            self.root.unknowns[name] = value

    def add_objective(self, name, indices=None, adder=0.0, scaler=1.0):
        """ Adds an objective to this driver.

        Args
        ----
        name : string
            Promoted pathname of the output that will serve as the objective.

        indices : iter of int, optional
            If an objective is an array, these indicate which entries are of
            interest for derivatives.

        adder : float or ndarray, optional
            Value to add to the model value to get the scaled value. Adder
            is first in precedence.

        scaler : float or ndarray, optional
            value to multiply the model value to get the scaled value. Scaler
            is second in precedence.
        """
        if isinstance(adder, np.ndarray):
            adder = adder.flatten()
        if isinstance(scaler, np.ndarray):
            scaler = scaler.flatten()

        obj = {}
        obj['adder'] = adder
        obj['scaler'] = scaler
        if indices:
            obj['indices'] = indices
            if len(indices) > 1 and not self.supports['multiple_objectives']:
                # BUGFIX: Driver defines no 'pathname' attribute, so the old
                # message raised AttributeError instead of this RuntimeError.
                raise RuntimeError("Multiple objective indices specified for "
                                   "variable '%s', but driver '%s' doesn't "
                                   "support multiple objectives." %
                                   (name, type(self).__name__))

        self._objs[name] = obj

    def get_objectives(self, return_type='dict'):
        """ Gets all objectives of this driver.

        Args
        ----
        return_type : string
            Set to 'dict' to return a dictionary, or set to 'array' to return a
            flat ndarray.

        Returns
        -------
        dict (for return_type 'dict')
            Key is the objective name string, value is an ndarray with the
            values.

        ndarray (for return_type 'array')
            Array containing all objective values in the order they were
            added.
        """
        # NOTE(review): return_type='array' is documented but not implemented
        # here; the method always returns an OrderedDict — confirm intent.
        uvec = self.root.unknowns
        objs = OrderedDict()

        for key, meta in self._objs.items():
            scaler = meta['scaler']
            adder = meta['adder']
            flatval = uvec.flat[key]
            if 'indices' in meta:
                # Make sure our indices are valid
                try:
                    flatval = flatval[meta['indices']]
                except IndexError:
                    msg = "Index for objective '{}' is out of bounds. "
                    msg += "Requested index: {}, "
                    msg += "Parameter shape: {}."
                    raise IndexError(msg.format(key, meta['indices'],
                                                uvec.metadata(key)['shape']))

            if isinstance(scaler, np.ndarray) or isinstance(adder, np.ndarray) \
                    or adder != 0.0 or scaler != 1.0:
                objs[key] = (flatval + adder)*scaler
            else:
                objs[key] = flatval

        return objs

    def add_constraint(self, name, ctype='ineq', linear=False, jacs=None,
                       indices=None, adder=0.0, scaler=1.0):
        """ Adds a constraint to this driver.

        Args
        ----
        name : string
            Promoted pathname of the output that will serve as the objective.

        ctype : string
            Set to 'ineq' for inequality constraints, or 'eq' for equality
            constraints. Make sure your driver supports the ctype of
            constraint that you are adding.

        linear : bool, optional
            Set to True if this constraint is linear with respect to all params
            so that it can be calculated once and cached.

        jacs : dict of functions, optional
            Dictionary of user-defined functions that return the flattened
            Jacobian of this constraint with repsect to the params of
            this driver, as indicated by the dictionary keys. Default is None
            to let OpenMDAO calculate all derivatives. Note, this is currently
            unsupported

        indices : iter of int, optional
            If a constraint is an array, these indicate which entries are of
            interest for derivatives.

        adder : float or ndarray, optional
            Value to add to the model value to get the scaled value. Adder
            is first in precedence.

        scaler : float or ndarray, optional
            value to multiply the model value to get the scaled value. Scaler
            is second in precedence.
        """
        # Reject constraint kinds the concrete driver cannot handle.
        if ctype == 'eq' and self.supports['equality_constraints'] is False:
            msg = "Driver does not support equality constraint '{}'."
            raise RuntimeError(msg.format(name))
        if ctype == 'ineq' and self.supports['inequality_constraints'] is False:
            msg = "Driver does not support inequality constraint '{}'."
            raise RuntimeError(msg.format(name))

        if isinstance(adder, np.ndarray):
            adder = adder.flatten()
        if isinstance(scaler, np.ndarray):
            scaler = scaler.flatten()

        con = {}
        con['linear'] = linear
        con['ctype'] = ctype
        con['adder'] = adder
        con['scaler'] = scaler
        con['jacs'] = jacs
        if indices:
            con['indices'] = indices

        self._cons[name] = con

    def get_constraints(self, ctype='all', lintype='all'):
        """ Gets all constraints for this driver.

        Args
        ----
        ctype : string
            Default is 'all'. Optionally return just the inequality
            constraints with 'ineq' or the equality constraints with 'eq'.

        lintype : string
            Default is 'all'. Optionally return just the linear constraints
            with 'linear' or the nonlinear constraints with 'nonlinear'.

        Returns
        -------
        dict
            Key is the constraint name string, value is an ndarray with the
            values.
        """
        uvec = self.root.unknowns
        cons = OrderedDict()

        for key, meta in self._cons.items():

            # Skip entries filtered out by the lintype/ctype arguments.
            if lintype == 'linear' and not meta['linear']:
                continue
            if lintype == 'nonlinear' and meta['linear']:
                continue
            if ctype == 'eq' and meta['ctype'] == 'ineq':
                continue
            if ctype == 'ineq' and meta['ctype'] == 'eq':
                continue

            scaler = meta['scaler']
            adder = meta['adder']
            flatval = uvec.flat[key]
            if 'indices' in meta:
                # Make sure our indices are valid
                try:
                    flatval = flatval[meta['indices']]
                except IndexError:
                    msg = "Index for constraint '{}' is out of bounds. "
                    msg += "Requested index: {}, "
                    msg += "Parameter shape: {}."
                    raise IndexError(msg.format(key, meta['indices'],
                                                uvec.metadata(key)['shape']))

            if isinstance(scaler, np.ndarray) or isinstance(adder, np.ndarray) \
                    or adder != 0.0 or scaler != 1.0:
                cons[key] = (flatval + adder)*scaler
            else:
                cons[key] = flatval

        return cons

    def get_constraint_metadata(self):
        """ Returns a dict of constraint metadata.

        Returns
        -------
        dict
            Keys are the constraint object names, and the values are the param
            values.
        """
        return self._cons

    def run(self, problem):
        """ Runs the driver. This function should be overriden when
        inheriting.

        Args
        ----
        problem : `Problem`
            Our parent `Problem`.
        """
        system = problem.root

        # Metadata Setup
        self.iter_count += 1
        metadata = create_local_meta(None, 'Driver')
        system.ln_solver.local_meta = metadata
        update_local_meta(metadata, (self.iter_count,))

        # Solve the system once and record results.
        system.solve_nonlinear(metadata=metadata)
        for recorder in self.recorders:
            recorder.raw_record(system.params, system.unknowns,
                                system.resids, metadata)
class SolverBase(object): """ Common base class for Linear and Nonlinear solver. Should not be used by users. Always inherit from one of the subclasses.""" def __init__(self): self.iter_count = 0 self.options = OptionsDictionary() desc = ( "Set to 0 to disable printing, set to 1 to print the " "residual to stdout each iteration, set to 2 to print " "subiteration residuals as well." ) self.options.add_option("iprint", 0, values=[0, 1, 2], desc=desc) self.recorders = [] self.local_meta = None def print_norm(self, solver_string, metadata, iteration, res, res0, msg=None, indent=0, solver="NL"): """ Prints out the norm of the residual in a neat readable format. Args ---- solver_string: string Unique string to identify your solver type (e.g., 'LN_GS' or 'NEWTON'). metadata: dict OpenMDAO execution metadata containing iteration info. iteration: int Current iteration number res: float Absolute residual value. res0: float Baseline initial residual for relative comparison. msg: string, optional Message that indicates convergence. ident: int Additional indentation levels for subiterations. solver: string Solver type if not LN or NL (mostly for line search operations.) """ name = metadata["name"] # Find indentation level level = sum(len(item) for item in metadata["coord"] if not isinstance(item, str)) # No indentation for driver; top solver is no indentation. level = level + indent - 2 indent = " " * level if msg is not None: form = indent + "[%s] %s: %s %d | %s" print(form % (name, solver, solver_string, iteration, msg)) return form = indent + "[%s] %s: %s %d | %.9g %.9g" print(form % (name, solver, solver_string, iteration, res, res / res0))
class BaseRecorder(object):
    """ Base class for all case recorders. """

    def __init__(self):
        self.options = OptionsDictionary()
        self.options.add_option(
            'includes', ['*'],
            desc='Patterns for variables to include in recording')
        self.options.add_option(
            'excludes', [],
            desc='Patterns for variables to exclude from recording '
            '(processed after includes)')
        self.out = None
        self._filtered = {}
        # TODO: System specific includes/excludes

    def startup(self, group):
        """ Prepare for a new run.

        Args
        ----
        group : `Group`
            Group that owns this recorder.
        """
        # Cache, per group pathname, the variable names surviving the
        # include/exclude filters so raw_record() needn't re-filter.
        keep = self._check_path
        self._filtered[group.pathname] = (
            [name for name in group.params if keep(name)],
            [name for name in group.unknowns if keep(name)],
            [name for name in group.resids if keep(name)],
        )

    def _check_path(self, path):
        """ Return True if `path` should be recorded. """
        # Recorded iff the path matches at least one include pattern and
        # none of the exclude patterns.
        if not any(fnmatch(path, pattern)
                   for pattern in self.options['includes']):
            return False
        return not any(fnmatch(path, pattern)
                       for pattern in self.options['excludes'])

    def raw_record(self, params, unknowns, resids, metadata):
        """ This is the method that drivers and solvers will call during their
        execution to record their current state. This method is responsible
        for filtering the provided data to reflect the includes/excludes
        provided by the user and then calling `self.record`. Recorder
        subclasses should override `record`, altering this function should not
        be necessary.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        metadata : dict
            Dictionary containing execution metadata (e.g. iteration
            coordinate).
        """
        # Coord will look like ['Driver', (1,), 'root', (1,), 'G1', (1,1), ...]
        # So the pathname is every other entry, starting with the fifth.
        pathname = '.'.join(metadata['coord'][4::2])
        pnames, unames, rnames = self._filtered[pathname]

        self.record({key: params[key] for key in pnames},
                    {key: unknowns[key] for key in unames},
                    {key: resids[key] for key in rnames},
                    metadata)

    def record(self, params, unknowns, resids, metadata):
        """ Records the requested variables. This method must be defined in
        all recorders.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        metadata : dict
            Dictionary containing execution metadata (e.g. iteration
            coordinate).
        """
        raise NotImplementedError("record")

    def close(self):
        """Closes `out` unless it's ``sys.stdout``, ``sys.stderr``, or
        StringIO. Note that a closed recorder will do nothing in
        :meth:`record`."""
        if self.out in (None, sys.stdout, sys.stderr):
            return
        # Closing a StringIO deletes its contents.
        if not isinstance(self.out, StringIO):
            self.out.close()
        self.out = None