def line_search(self):
    """
    Return the current linesearch object (deprecated alias for 'linesearch').
    """
    msg = ("The 'line_search' attribute provides backwards compatibility "
           "with OpenMDAO 1.x ; use 'linesearch' instead.")
    warn_deprecation(msg)
    return self.linesearch
def factorial(*args):
    """
    Deprecated wrapper that forwards to scipy.special.factorial.
    """
    msg = ("The 'factorial' function is deprecated. "
           "It is no longer supported for SciPy versions >= 1.5.")
    warn_deprecation(msg)
    return scipy.special.factorial(*args)
def _setup_solvers(self, system, depth):
    """
    Assign system instance, set depth, and optionally perform setup.

    Parameters
    ----------
    system : System
        pointer to the owning system.
    depth : int
        depth of the current system (already incremented).
    """
    super(NewtonSolver, self)._setup_solvers(system, depth)

    # Newton cannot operate on discrete outputs; fail fast if any exist.
    self._disallow_discrete_outputs()

    if self.linear_solver is not None:
        # A dedicated linear solver was assigned to this Newton solver; recurse
        # setup into it, one level deeper than this solver.
        # NOTE(review): self._system may be a weakref in some versions of the
        # base class -- confirm what _setup_solvers expects here.
        self.linear_solver._setup_solvers(self._system, self._depth + 1)
    else:
        # Fall back to the owning system's linear solver (set up by the system).
        self.linear_solver = system.linear_solver

    if self.linesearch is not None:
        self.linesearch._setup_solvers(self._system, self._depth + 1)
    else:
        # In OpenMDAO 3.x, we will be making BoundsEnforceLS the default line search.
        # This deprecation warning is to prepare users for the change.
        pathname = self._system.pathname
        if pathname:
            pathname += ': '
        msg = 'Deprecation warning: In V 3.0, the default Newton solver setup will change ' + \
              'to use the BoundsEnforceLS line search.'
        warn_deprecation(pathname + msg)
def check_mpi_env():
    """
    Determine if the environment variable governing MPI usage is set.

    Returns
    -------
    bool
        True if MPI is required, False if it's to be skipped, None if not set.
    """
    if 'OPENMDAO_REQUIRE_MPI' in os.environ:
        warn_deprecation("Set OPENMDAO_USE_MPI instead of OPENMDAO_REQUIRE_MPI.")

    mpi_selection = os.environ.get('OPENMDAO_USE_MPI',
                                   os.environ.get('OPENMDAO_REQUIRE_MPI', None))

    # Unset: the import will be attempted but give no warning if it fails.
    if mpi_selection is None:
        return None

    # A positive value means the run must fail immediately if the import fails.
    if str(mpi_selection).lower() in ('always', '1', 'true', 'yes', 'y', 'on'):
        return True

    # Any other value: no import is attempted.
    return False
def __setitem__(self, name, value):
    """
    Set an option in the local dictionary.

    Parameters
    ----------
    name : str
        name of the option.
    value : -
        value of the option to be value- and type-checked if declared.
    """
    try:
        meta = self._dict[name]
    except KeyError:
        # Options must be declared before they can be set.
        self._raise("Option '{}' cannot be set because it has not been declared.".format(name),
                    exc_type=KeyError)

    deprecation = meta['deprecation']
    if deprecation is not None and name not in self._deprecation_warning_issued:
        # Warn only once per deprecated option.
        warn_deprecation(deprecation)
        self._deprecation_warning_issued.append(name)

    if self._read_only:
        self._raise("Tried to set read-only option '{}'.".format(name), exc_type=KeyError)

    self._assert_valid(name, value)

    meta['value'] = value
    meta['has_been_set'] = True
def __getitem__(self, name):
    """
    Get an option from the dict or declared default.

    Parameters
    ----------
    name : str
        name of the option.

    Returns
    -------
    value : -
        value of the option.
    """
    # If the option has been set in this system, return the set value
    try:
        meta = self._dict[name]
        deprecation = meta['deprecation']
        if deprecation is not None and name not in self._deprecation_warning_issued:
            # Warn only once per deprecated option.
            warn_deprecation(deprecation)
            self._deprecation_warning_issued.append(name)
        if not meta['has_been_set']:
            self._raise("Option '{}' is required but has not been set.".format(name))
        return meta['value']
    except KeyError:
        self._raise("Option '{}' cannot be found".format(name), exc_type=KeyError)
def line_search(self, solver):
    """
    Set the linesearch solver (deprecated alias for 'linesearch').
    """
    msg = ("The 'line_search' attribute provides backwards compatibility "
           "with OpenMDAO 1.x ; use 'linesearch' instead.")
    warn_deprecation(msg)
    self.linesearch = solver
def _setup_solvers(self, system, depth):
    """
    Assign system instance, set depth, and optionally perform setup.

    Parameters
    ----------
    system : <System>
        pointer to the owning system.
    depth : int
        depth of the current system (already incremented).
    """
    # Hold the owning system weakly to avoid a reference cycle.
    self._system = weakref.ref(system)
    self._depth = depth
    self._solver_info = system._solver_info
    self._recording_iter = system._recording_iter

    if system.pathname:
        # Propagate a descriptive parent name into the option dictionaries so
        # their error messages identify this solver.
        parent_name = self.msginfo
        self.options._parent_name = parent_name
        self.recording_options._parent_name = parent_name
        self.supports._parent_name = parent_name

    # Linear solvers are skipped entirely when derivatives are not used.
    if isinstance(self, LinearSolver) and not system._use_derivatives:
        return

    self._rec_mgr.startup(self)
    self._rec_mgr.record_metadata(self)

    # Default to empty filtered-variable lists; filled below per recording option.
    myoutputs = myresiduals = myinputs = []

    incl = self.recording_options['includes']
    excl = self.recording_options['excludes']

    # doesn't matter if we're a linear or nonlinear solver. The names for
    # inputs, outputs, and residuals are the same for both the 'linear' and 'nonlinear'
    # vectors.
    if system.pathname:
        # Make include/exclude patterns absolute by prefixing the system path.
        incl = ['.'.join((system.pathname, i)) for i in incl]
        excl = ['.'.join((system.pathname, i)) for i in excl]

    if self.recording_options['record_solver_residuals']:
        myresiduals = [n for n in system._residuals._views if check_path(n, incl, excl)]

    if self.recording_options['record_outputs']:
        myoutputs = [n for n in system._outputs._views if check_path(n, incl, excl)]

    if self.recording_options['record_inputs']:
        myinputs = [n for n in system._inputs._views if check_path(n, incl, excl)]

    self._filtered_vars_to_record = {
        'input': myinputs,
        'output': myoutputs,
        'residual': myresiduals
    }

    # Raise a deprecation warning for changed option.
    if 'err_on_maxiter' in self.options and self.options['err_on_maxiter'] is not None:
        # Mirror the legacy option onto its replacement so both behave the same.
        self.options['err_on_non_converge'] = self.options['err_on_maxiter']
        warn_deprecation("The 'err_on_maxiter' option provides backwards compatibility "
                         "with earlier version of OpenMDAO; use options['err_on_non_converge'] "
                         "instead.")
def default_surrogate(self):
    """
    Get the default surrogate for this MetaModel (deprecated attribute access).
    """
    msg = ("The 'default_surrogate' attribute provides backwards compatibility "
           "with earlier version of OpenMDAO; use options['default_surrogate'] "
           "instead.")
    warn_deprecation(msg)
    return self.options['default_surrogate']
def default_surrogate(self):
    """
    Get the default surrogate for this MetaModel (deprecated attribute access).
    """
    warn_deprecation("The 'default_surrogate' attribute provides backwards "
                     "compatibility with earlier version of OpenMDAO; "
                     "use options['default_surrogate'] instead.")
    return self.options['default_surrogate']
def __init__(self, **kwargs):
    """
    Initialize the deprecated RungeKutta transcription and warn about its removal.
    """
    super(RungeKutta, self).__init__(**kwargs)
    self._rhs_source = 'ode'
    warn_deprecation('The RungeKutta transcription is deprecated and will be removed '
                     'in Dymos v1.0.0.\n'
                     'For equivalent behavior, users should switch to '
                     'GaussLobatto(order=3, solve_segments=True)')
def __init__(self, name=None, val=1.0, **kwargs):
    """
    Initialize all attributes.

    Parameters
    ----------
    name : str or None or [(str, value), ...] or [(str, value, kwargs), ...]
        name of the variable.
        If None, variables should be defined external to this class by calling add_output.
        For backwards compatibility with OpenMDAO v1, this can also be a list of tuples
        in the case of declaring multiple variables at once.
    val : float or ndarray
        value of the variable if a single variable is being defined.
    **kwargs : dict
        keyword arguments.
    """
    # collections.Iterable was deprecated in Python 3.3 and removed in 3.10;
    # the ABCs live in collections.abc.
    from collections.abc import Iterable

    super(IndepVarComp, self).__init__()

    self._indep = []
    self._indep_external = []
    self._indep_external_discrete = []

    # A single variable is declared during instantiation
    if isinstance(name, string_types):
        self._indep.append((name, val, kwargs))
    # Multiple variables are declared during instantiation (deprecated)
    elif isinstance(name, Iterable):
        warn_deprecation(
            'Declaring multiple variables in this way is deprecated. '
            'In OpenMDAO 2.x or later, multiple variables should be declared '
            'as separate add_output calls.')

        # Loop through each variable (i.e., each tuple)
        for tup in name:
            # If valid tuple, assign to (name, val, kwargs); otherwise, raise an exception
            if isinstance(tup, tuple) and len(tup) == 3:
                name_, val, kwargs = tup
            elif isinstance(tup, tuple) and len(tup) == 2:
                name_, val = tup
                kwargs = {}
            else:
                raise ValueError(
                    "IndepVarComp init: arg %s must be a tuple of the "
                    "form (name, value) or (name, value, keyword_dict)." % str(tup))
            self._indep.append((name_, val, kwargs))
    elif name is None:
        pass
    else:
        raise ValueError(
            "first argument to IndepVarComp init must be either of type "
            "`str` or an iterable of tuples of the form (name, value) or "
            "(name, value, keyword_dict).")

    # NOTE(review): the tuple unpacking above rebinds 'kwargs', so this check
    # inspects the last tuple's kwargs in the multi-variable case -- confirm intended.
    for illegal in ('promotes', 'promotes_inputs', 'promotes_outputs'):
        if illegal in kwargs:
            raise ValueError("IndepVarComp init: '%s' is not supported "
                             "in IndepVarComp." % illegal)
def __init__(self, name=None, val=1.0, **kwargs):
    """
    Initialize all attributes.

    Parameters
    ----------
    name : str or None or [(str, value), ...] or [(str, value, kwargs), ...]
        name of the variable.
        If None, variables should be defined external to this class by calling add_output.
        For backwards compatibility with OpenMDAO v1, this can also be a list of tuples
        in the case of declaring multiple variables at once.
    val : float or ndarray
        value of the variable if a single variable is being defined.
    **kwargs : dict
        keyword arguments.
    """
    # 'Iterable' moved to collections.abc (the collections alias was removed
    # in Python 3.10), so import it from there.
    from collections.abc import Iterable

    super(IndepVarComp, self).__init__()

    self._indep = []
    self._indep_external = []
    self._indep_external_discrete = []

    # A single variable is declared during instantiation
    if isinstance(name, string_types):
        self._indep.append((name, val, kwargs))
    # Multiple variables are declared during instantiation (deprecated)
    elif isinstance(name, Iterable):
        warn_deprecation('Declaring multiple variables in this way is deprecated. '
                         'In OpenMDAO 2.x or later, multiple variables should be declared '
                         'as separate add_output calls.')

        # Loop through each variable (i.e., each tuple)
        for tup in name:
            # If valid tuple, assign to (name, val, kwargs); otherwise, raise an exception
            if isinstance(tup, tuple) and len(tup) == 3:
                name_, val, kwargs = tup
            elif isinstance(tup, tuple) and len(tup) == 2:
                name_, val = tup
                kwargs = {}
            else:
                raise ValueError(
                    "IndepVarComp init: arg %s must be a tuple of the "
                    "form (name, value) or (name, value, keyword_dict)." % str(tup))
            self._indep.append((name_, val, kwargs))
    elif name is None:
        pass
    else:
        raise ValueError(
            "first argument to IndepVarComp init must be either of type "
            "`str` or an iterable of tuples of the form (name, value) or "
            "(name, value, keyword_dict).")

    # NOTE(review): 'kwargs' may have been rebound by the tuple unpacking above,
    # so this inspects the last tuple's kwargs in the multi-variable case.
    for illegal in ('promotes', 'promotes_inputs', 'promotes_outputs'):
        if illegal in kwargs:
            raise ValueError("IndepVarComp init: '%s' is not supported "
                             "in IndepVarComp." % illegal)
def __init__(self, **kwargs):
    """
    Initialize attributes and warn that this class name is deprecated.

    Parameters
    ----------
    **kwargs : dict
        Named args.
    """
    super(PetscKSP, self).__init__(**kwargs)
    msg = 'PetscKSP is deprecated. Use PETScKrylov instead.'
    warn_deprecation(msg)
def __init__(self, **kwargs):
    """
    Initialize attributes and emit a deprecation warning for this class name.

    Parameters
    ----------
    **kwargs : dict
        Named args.
    """
    super(PetscKSP, self).__init__(**kwargs)
    warn_deprecation('PetscKSP is deprecated. '
                     'Use PETScKrylov instead.')
def iteration_coordinate(self):
    """
    Deprecate the 'iteration_coordinate' attribute.

    Returns
    -------
    str
        The unique identifier for this case.
    """
    msg = "'iteration_coordinate' has been deprecated. Use 'name' instead."
    warn_deprecation(msg)
    return self.name
def preconditioner(self, precon):
    """
    Provide for setting the 'preconditioner' property for backwards compatibility.

    Parameters
    ----------
    precon : <LinearSolver>
        reference to a <LinearSolver> to be assigned to the 'precon' property.
    """
    msg = ("The 'preconditioner' property provides backwards compatibility "
           "with OpenMDAO <= 1.x ; use 'precon' instead.")
    warn_deprecation(msg)
    self.precon = precon
def __init__(self, **kwargs):
    """
    Initialize attributes and warn that this driver name is deprecated.

    Parameters
    ----------
    **kwargs : dict
        Named args.
    """
    super(ScipyOptimizer, self).__init__(**kwargs)
    msg = ("'ScipyOptimizer' provides backwards compatibility "
           "with OpenMDAO <= 2.2 ; use 'ScipyOptimizeDriver' instead.")
    warn_deprecation(msg)
def __init__(self, **kwargs):
    """
    Capture Initialize to throw warning.

    Parameters
    ----------
    **kwargs : dict
        Deprecated arguments.
    """
    # Warn first, then delegate construction to the supported class.
    msg = "'FloatKrigingSurrogate' has been deprecated. Use 'KrigingSurrogate' instead."
    warn_deprecation(msg)
    super(FloatKrigingSurrogate, self).__init__(**kwargs)
def __init__(self, **kwargs):
    """
    Initialize attributes and emit a deprecation warning for this driver name.

    Parameters
    ----------
    **kwargs : dict
        Named args.
    """
    super(ScipyOptimizer, self).__init__(**kwargs)
    warn_deprecation("'ScipyOptimizer' provides backwards compatibility with "
                     "OpenMDAO <= 2.2 ; use 'ScipyOptimizeDriver' instead.")
def record_system_options(problem):
    """
    Record the system options for all systems in the model.

    Parameters
    ----------
    problem : Problem
        The problem for which all its systems' options are to be recorded.
    """
    msg = ("The 'record_system_options' function is deprecated. "
           "Use 'record_model_options' instead.")
    warn_deprecation(msg)
    record_model_options(problem)
def distributed(self, val):
    """
    Provide for setting of the 'distributed' property for backwards compatibility.

    Parameters
    ----------
    val : bool
        True if the component has variables that are distributed across multiple
        processes.
    """
    msg = ("The 'distributed' property provides backwards compatibility "
           "with OpenMDAO <= 2.4.0 ; use the 'distributed' option instead.")
    warn_deprecation(msg)
    self.options['distributed'] = val
def distributed(self):
    """
    Provide 'distributed' property for backwards compatibility.

    Returns
    -------
    bool
        reference to the 'distributed' option.
    """
    msg = ("The 'distributed' property provides backwards compatibility "
           "with OpenMDAO <= 2.4.0 ; use the 'distributed' option instead.")
    warn_deprecation(msg)
    return self.options['distributed']
def preconditioner(self):
    """
    Provide 'preconditioner' property for backwards compatibility.

    Returns
    -------
    <LinearSolver>
        reference to the 'precon' property.
    """
    msg = ("The 'preconditioner' property provides backwards compatibility "
           "with OpenMDAO <= 1.x ; use 'precon' instead.")
    warn_deprecation(msg)
    return self.precon
def __init__(self, **kwargs):
    """
    Capture Initialize to throw warning.

    Parameters
    ----------
    **kwargs : dict
        Deprecated arguments.
    """
    # Warning is issued before delegating to the replacement class.
    warn_deprecation("'FloatKrigingSurrogate' has been deprecated. "
                     "Use 'KrigingSurrogate' instead.")
    super(FloatKrigingSurrogate, self).__init__(**kwargs)
def record_metadata(self, recording_requester):
    """
    Call record_metadata for all recorders.

    Parameters
    ----------
    recording_requester : object
        The object that needs its metadata recorded.
    """
    # This is now a no-op aside from the warning; recording happens automatically.
    msg = ("The 'record_metadata' function is deprecated. "
           "All system and solver options are recorded automatically.")
    warn_deprecation(msg)
def preconditioner(self, precon):
    """
    Provide for setting the 'preconditioner' property for backwards compatibility.

    Parameters
    ----------
    precon : LinearSolver
        reference to a <LinearSolver> to be assigned to the 'precon' property.
    """
    warn_deprecation("The 'preconditioner' property provides backwards "
                     "compatibility with OpenMDAO <= 1.x ; use 'precon' instead.")
    self.precon = precon
def preconditioner(self):
    """
    Provide 'preconditioner' property for backwards compatibility.

    Returns
    -------
    LinearSolver
        reference to the 'precon' property.
    """
    warn_deprecation("The 'preconditioner' property provides backwards "
                     "compatibility with OpenMDAO <= 1.x ; use 'precon' instead.")
    return self.precon
def __init__(self, *args, **kwargs):
    """
    Deprecated.

    Parameters
    ----------
    *args : list of object
        Positional args.
    **kwargs : dict
        Named args.
    """
    super(ScipyIterativeSolver, self).__init__(*args, **kwargs)
    msg = 'ScipyIterativeSolver is deprecated. Use ScipyKrylov instead.'
    warn_deprecation(msg)
def view_model(*args, **kwargs):
    """
    Deprecated alias for n2, kept for backwards compatibility.

    Parameters
    ----------
    *args : list
        Positional args forwarded to n2.
    **kwargs : dict
        Keyword args forwarded to n2.
    """
    warn_deprecation("view_model is deprecated. Please switch to n2.")
    n2(*args, **kwargs)
def __init__(self, *args, **kwargs):
    """
    Deprecated.

    Parameters
    ----------
    *args : list of object
        Positional args.
    **kwargs : dict
        Named args.
    """
    super(NonLinearRunOnce, self).__init__(*args, **kwargs)
    msg = 'NonLinearRunOnce is deprecated. Use NonlinearRunOnce instead.'
    warn_deprecation(msg)
def __init__(self, *args, **kwargs):
    """
    Deprecated.

    Parameters
    ----------
    *args : list of object
        Positional args.
    **kwargs : dict
        Named args.
    """
    super(ScipyIterativeSolver, self).__init__(*args, **kwargs)
    warn_deprecation('ScipyIterativeSolver is deprecated. '
                     'Use ScipyKrylov instead.')
def distributed(self):
    """
    Provide 'distributed' property for backwards compatibility.

    Returns
    -------
    bool
        reference to the 'distributed' option.
    """
    warn_deprecation("The 'distributed' property provides backwards compatibility "
                     "with OpenMDAO <= 2.4.0 ; use the 'distributed' option instead.")
    return self.options['distributed']
def distributed(self, val):
    """
    Provide for setting of the 'distributed' property for backwards compatibility.

    Parameters
    ----------
    val : bool
        True if the component has variables that are distributed across multiple
        processes.
    """
    warn_deprecation("The 'distributed' property provides backwards compatibility "
                     "with OpenMDAO <= 2.4.0 ; use the 'distributed' option instead.")
    self.options['distributed'] = val
def __init__(self, *args, **kwargs):
    """
    Capture Initialize to throw warning.

    Parameters
    ----------
    *args : list
        Deprecated arguments.
    **kwargs : dict
        Deprecated arguments.
    """
    # Warn first, then delegate construction to the renamed class.
    msg = ("'MetaModelUnStructured' has been deprecated. Use "
           "'MetaModelUnStructuredComp' instead.")
    warn_deprecation(msg)
    super(MetaModelUnStructured, self).__init__(*args, **kwargs)
def __init__(self, **kwargs):
    """
    Initialize the BsplinesComp.

    Parameters
    ----------
    **kwargs : dict of keyword arguments
        Keyword arguments that will be mapped into the Component options.
    """
    super(BsplinesComp, self).__init__(**kwargs)

    self.cite = CITATIONS

    msg = "'BsplinesComp' has been deprecated. Use 'SplineComp' instead."
    warn_deprecation(msg)
def __init__(self, *args, **kwargs):
    """
    Deprecated.

    Parameters
    ----------
    *args : list of object
        Positional args.
    **kwargs : dict
        Named args.
    """
    super(NonLinearRunOnce, self).__init__(*args, **kwargs)
    warn_deprecation('NonLinearRunOnce is deprecated. '
                     'Use NonlinearRunOnce instead.')
def __init__(self, *args, **kwargs):
    """
    Capture Initialize to throw warning.

    Parameters
    ----------
    *args : list
        Deprecated arguments.
    **kwargs : dict
        Deprecated arguments.
    """
    msg = "'KSComponent' has been deprecated. Use 'KSComp' instead."
    warn_deprecation(msg)
    super(KSComponent, self).__init__(*args, **kwargs)
def __init__(self, *args, **kwargs):
    """
    Capture Initialize to throw warning.

    Parameters
    ----------
    *args : list
        Deprecated arguments.
    **kwargs : dict
        Deprecated arguments.
    """
    # Emit the rename warning before constructing the replacement class.
    warn_deprecation("'KSComponent' has been deprecated. "
                     "Use 'KSComp' instead.")
    super(KSComponent, self).__init__(*args, **kwargs)
def __init__(self, *args, **kwargs):
    """
    Capture Initialize to throw warning.

    Parameters
    ----------
    *args : list
        Deprecated arguments.
    **kwargs : dict
        Deprecated arguments.
    """
    # Fixed: the original message was missing the space between "Use" and the
    # quoted class name ("Use'MetaModelUnStructured'").
    warn_deprecation("'MetaModel' component has been deprecated. Use "
                     "'MetaModelUnStructured' instead.")
    super(Metamodel, self).__init__(*args, **kwargs)
def set_simul_deriv_color(self, coloring):
    """
    See use_fixed_coloring. This method is deprecated.

    Parameters
    ----------
    coloring : str or Coloring
        Information about simultaneous coloring for design vars and responses.
        If a string, then coloring is assumed to be the name of a file that
        contains the coloring information in pickle format. Otherwise it must
        be a Coloring object.  See the docstring for Coloring for details.
    """
    msg = "set_simul_deriv_color is deprecated. Use use_fixed_coloring instead."
    warn_deprecation(msg)
    self.use_fixed_coloring(coloring)
def __init__(self, **kwargs):
    """
    Add a few more attributes.

    Parameters
    ----------
    **kwargs : dict of keyword arguments
        available here and in all descendants of this system.
    """
    super(Component, self).__init__(**kwargs)

    self._state_names = []
    self._output_names = []
    # Remember whether the subclass supplied apply_linear before any later rebinding.
    self._initial_apply_linear = getattr(self, 'apply_linear', None)

    warn_deprecation('Components should inherit from ImplicitComponent '
                     'or ExplicitComponent. This class provides '
                     'backwards compatibility with OpenMDAO <= 1.x as '
                     'this Component class is deprecated')
def add_input_parameter(self, name, units, val=_unspecified, desc=_unspecified,
                        targets=_unspecified, custom_targets=_unspecified,
                        shape=_unspecified, dynamic=_unspecified):
    """
    Add an input parameter to the trajectory.

    Parameters
    ----------
    name : str
        Name of the input parameter.
    units : str or None or 0
        Units in which the input parameter is defined.  If 0, use the units
        declared for the parameter in the ODE.
    val : float or ndarray
        Default value of the input parameter at all nodes.
    desc : str
        A description of the input parameter.
    targets : dict or None
        A dictionary mapping the name of each phase in the trajectory to a sequence
        of ODE targets for this parameter in each phase.
    custom_targets : dict or None
        Accepted for backwards compatibility; not forwarded to add_parameter.
    shape : Sequence of int
        The shape of the input parameter.
    dynamic : bool
        True if the targets in the ODE may be dynamic (if the inputs are sized to the
        number of nodes) else False.
    """
    warn_deprecation("DesignParameters and InputParameters are being replaced by Parameters "
                     "in Dymos 1.0.0. Please use add_parameter or set_parameter_options to "
                     "remove this deprecation warning.")
    # NOTE(review): custom_targets is accepted but silently dropped here -- confirm intended.
    self.add_parameter(name, units, val=val, desc=desc, targets=targets,
                       shape=shape, dynamic=dynamic)
def declare(self, name, default=_undefined, values=None, types=None, type_=None,
            desc='', upper=None, lower=None, is_valid=None, allow_none=False):
    r"""
    Declare an option.

    The value of the option must satisfy the following:
    1. If values only was given when declaring, value must be in values.
    2. If types only was given when declaring, value must satisfy isinstance(value, types).
    3. It is an error if both values and types are given.

    Parameters
    ----------
    name : str
        Name of the option.
    default : object or Null
        Optional default value that must be valid under the above 3 conditions.
    values : set or list or tuple or None
        Optional list of acceptable option values.
    types : type or tuple of types or None
        Optional type or list of acceptable option types.
    type_ : type or tuple of types or None
        Deprecated. Use types instead.
    desc : str
        Optional description of the option.
    upper : float or None
        Maximum allowable value.
    lower : float or None
        Minimum allowable value.
    is_valid : function or None
        General check function that returns True if valid.
    allow_none : bool
        If True, allow None as a value regardless of values or types.
    """
    if type_ is not None:
        # Fixed: the message previously referred to '_type', but the deprecated
        # argument is actually named 'type_'.
        warn_deprecation("In declaration of option '%s' the 'type_' arg is deprecated. "
                         "Use 'types' instead." % name)
        # Honor the deprecated arg only when 'types' was not also supplied.
        if types is None:
            types = type_

    if values is not None and not isinstance(values, (set, list, tuple)):
        raise TypeError("'values' must be of type None, list, or tuple - not %s." % values)
    if types is not None and not isinstance(types, (type, set, list, tuple)):
        raise TypeError("'types' must be None, a type or a tuple - not %s." % types)

    if types is not None and values is not None:
        raise RuntimeError("'types' and 'values' were both specified for option '%s'." % name)

    default_provided = default is not _undefined

    self._dict[name] = {
        'value': default,
        'values': values,
        'types': types,
        'desc': desc,
        'upper': upper,
        'lower': lower,
        'is_valid': is_valid,
        'has_been_set': default_provided,
        'allow_none': allow_none,
    }

    # If a default is given, check for validity
    if default_provided:
        self._assert_valid(name, default)
def add_input(self, name, val=1.0, shape=None, src_indices=None, flat_src_indices=None,
              units=None, desc=''):
    """
    Add an input variable to the component.

    Parameters
    ----------
    name : str
        name of the variable in this component's namespace.
    val : float or list or tuple or ndarray or Iterable
        The initial value of the variable being added in user-defined units.
        Default is 1.0.
    shape : int or tuple or list or None
        Shape of this variable, only required if src_indices not provided and
        val is not an array. Default is None.
    src_indices : int or list of ints or tuple of ints or int ndarray or Iterable or None
        The global indices of the source variable to transfer data from.
        A value of None implies this input depends on all entries of source.
        Default is None. The shapes of the target and src_indices must match,
        and form of the entries within is determined by the value of 'flat_src_indices'.
    flat_src_indices : bool
        If True, each entry of src_indices is assumed to be an index into the
        flattened source.  Otherwise each entry must be a tuple or list of size equal
        to the number of dimensions of the source.
    units : str or None
        Units in which this input variable will be provided to the component
        during execution. Default is None, which means it is unitless.
    desc : str
        description of the variable

    Returns
    -------
    dict
        metadata for added variable
    """
    # 'unitless' was a legacy OpenMDAO 1.x spelling; treat it as no units.
    if units == 'unitless':
        warn_deprecation("Input '%s' has units='unitless' but 'unitless' "
                         "has been deprecated. Use "
                         "units=None instead.  Note that connecting a "
                         "unitless variable to one with units is no longer "
                         "an error, but will issue a warning instead." % name)
        units = None

    # First, type check all arguments
    if not isinstance(name, str):
        raise TypeError('The name argument should be a string')
    if not _valid_var_name(name):
        raise NameError("'%s' is not a valid input name." % name)
    if not isscalar(val) and not isinstance(val, (list, tuple, ndarray, Iterable)):
        raise TypeError('The val argument should be a float, list, tuple, ndarray or Iterable')
    if shape is not None and not isinstance(shape, (int, tuple, list, np.integer)):
        raise TypeError("The shape argument should be an int, tuple, or list but "
                        "a '%s' was given" % type(shape))
    if src_indices is not None and not isinstance(src_indices,
                                                  (int, list, tuple, ndarray, Iterable)):
        raise TypeError('The src_indices argument should be an int, list, '
                        'tuple, ndarray or Iterable')
    if units is not None and not isinstance(units, str):
        raise TypeError('The units argument should be a str or None')

    # Check that units are valid
    if units is not None and not valid_units(units):
        raise ValueError("The units '%s' are invalid" % units)

    metadata = {}

    # value, shape: based on args, making sure they are compatible
    # (ensure_compatible may also normalize src_indices against the shape)
    metadata['value'], metadata['shape'], src_indices = ensure_compatible(name, val, shape,
                                                                          src_indices)
    metadata['size'] = np.prod(metadata['shape'])

    # src_indices: None or ndarray
    if src_indices is None:
        metadata['src_indices'] = None
    else:
        metadata['src_indices'] = np.asarray(src_indices, dtype=INT_DTYPE)
    metadata['flat_src_indices'] = flat_src_indices

    metadata['units'] = units
    metadata['desc'] = desc

    metadata['distributed'] = self.options['distributed']

    # We may not know the pathname yet, so we have to use name for now, instead of abs_name.
    if self._static_mode:
        var_rel2meta = self._static_var_rel2meta
        var_rel_names = self._static_var_rel_names
    else:
        var_rel2meta = self._var_rel2meta
        var_rel_names = self._var_rel_names

    # Disallow dupes
    if name in var_rel2meta:
        msg = "Variable name '{}' already exists.".format(name)
        raise ValueError(msg)

    var_rel2meta[name] = metadata
    var_rel_names['input'].append(name)

    return metadata
def add_output(self, name, val=1.0, shape=None, units=None, res_units=None, desc='',
               lower=None, upper=None, ref=1.0, ref0=0.0, res_ref=1.0):
    """
    Add an output variable to the component.

    Parameters
    ----------
    name : str
        name of the variable in this component's namespace.
    val : float or list or tuple or ndarray
        The initial value of the variable being added in user-defined units.
        Default is 1.0.
    shape : int or tuple or list or None
        Shape of this variable, only required if val is not an array.
        Default is None.
    units : str or None
        Units in which the output variables will be provided to the component
        during execution. Default is None, which means it has no units.
    res_units : str or None
        Units in which the residuals of this output will be given to the user
        when requested. Default is None, which means it has no units.
    desc : str
        description of the variable.
    lower : float or list or tuple or ndarray or Iterable or None
        lower bound(s) in user-defined units. It can be (1) a float, (2) an
        array_like consistent with the shape arg (if given), or (3) an array_like
        matching the shape of val, if val is array_like. A value of None means this
        output has no lower bound. Default is None.
    upper : float or list or tuple or ndarray or Iterable or None
        upper bound(s) in user-defined units. It can be (1) a float, (2) an
        array_like consistent with the shape arg (if given), or (3) an array_like
        matching the shape of val, if val is array_like. A value of None means this
        output has no upper bound. Default is None.
    ref : float or ndarray
        Scaling parameter. The value in the user-defined units of this output
        variable when the scaled value is 1. Default is 1.
    ref0 : float or ndarray
        Scaling parameter. The value in the user-defined units of this output
        variable when the scaled value is 0. Default is 0.
    res_ref : float or ndarray
        Scaling parameter. The value in the user-defined res_units of this output's
        residual when the scaled value is 1. Default is 1.

    Returns
    -------
    dict
        metadata for added variable
    """
    # 'unitless' was a legacy OpenMDAO 1.x spelling; treat it as no units.
    if units == 'unitless':
        warn_deprecation("Output '%s' has units='unitless' but 'unitless' "
                         "has been deprecated. Use "
                         "units=None instead.  Note that connecting a "
                         "unitless variable to one with units is no longer "
                         "an error, but will issue a warning instead." % name)
        units = None

    if not isinstance(name, str):
        raise TypeError('The name argument should be a string')
    if not _valid_var_name(name):
        raise NameError("'%s' is not a valid output name." % name)
    if not isscalar(val) and not isinstance(val, (list, tuple, ndarray, Iterable)):
        msg = 'The val argument should be a float, list, tuple, ndarray or Iterable'
        raise TypeError(msg)
    # Fixed: the following three checks previously tested 'val' instead of the
    # argument being validated (ref/ref0/res_ref), so bad values slipped through
    # whenever val was array_like.
    if not isscalar(ref) and not isinstance(ref, (list, tuple, ndarray, Iterable)):
        msg = 'The ref argument should be a float, list, tuple, ndarray or Iterable'
        raise TypeError(msg)
    if not isscalar(ref0) and not isinstance(ref0, (list, tuple, ndarray, Iterable)):
        msg = 'The ref0 argument should be a float, list, tuple, ndarray or Iterable'
        raise TypeError(msg)
    if not isscalar(res_ref) and not isinstance(res_ref, (list, tuple, ndarray, Iterable)):
        msg = 'The res_ref argument should be a float, list, tuple, ndarray or Iterable'
        raise TypeError(msg)
    if shape is not None and not isinstance(shape, (int, tuple, list, np.integer)):
        raise TypeError("The shape argument should be an int, tuple, or list but "
                        "a '%s' was given" % type(shape))
    if units is not None and not isinstance(units, str):
        raise TypeError('The units argument should be a str or None')
    if res_units is not None and not isinstance(res_units, str):
        raise TypeError('The res_units argument should be a str or None')

    # Check that units are valid
    if units is not None and not valid_units(units):
        raise ValueError("The units '%s' are invalid" % units)

    metadata = {}

    # value, shape: based on args, making sure they are compatible
    metadata['value'], metadata['shape'], _ = ensure_compatible(name, val, shape)
    metadata['size'] = np.prod(metadata['shape'])

    # units, res_units: taken as is
    metadata['units'] = units
    metadata['res_units'] = res_units

    # desc: taken as is
    metadata['desc'] = desc

    if lower is not None:
        lower = ensure_compatible(name, lower, metadata['shape'])[0]
    if upper is not None:
        upper = ensure_compatible(name, upper, metadata['shape'])[0]
    metadata['lower'] = lower
    metadata['upper'] = upper

    # All refs: check the shape if necessary
    for item, item_name in zip([ref, ref0, res_ref], ['ref', 'ref0', 'res_ref']):
        if not isscalar(item):
            it = atleast_1d(item)
            if it.shape != metadata['shape']:
                raise ValueError("'{}': When adding output '{}', expected shape {} but got "
                                 "shape {} for argument '{}'.".format(self.name, name,
                                                                      metadata['shape'],
                                                                      it.shape, item_name))

    # Track whether any non-trivial output/residual scaling was supplied.
    if isscalar(ref):
        self._has_output_scaling |= ref != 1.0
    else:
        self._has_output_scaling |= np.any(ref != 1.0)
    if isscalar(ref0):
        self._has_output_scaling |= ref0 != 0.0
    else:
        # Consistent with the scalar branch: nonzero ref0 implies scaling.
        self._has_output_scaling |= np.any(ref0 != 0.0)
    if isscalar(res_ref):
        self._has_resid_scaling |= res_ref != 1.0
    else:
        self._has_resid_scaling |= np.any(res_ref != 1.0)

    ref = format_as_float_or_array('ref', ref, flatten=True)
    ref0 = format_as_float_or_array('ref0', ref0, flatten=True)
    res_ref = format_as_float_or_array('res_ref', res_ref, flatten=True)

    metadata['ref'] = ref
    metadata['ref0'] = ref0
    metadata['res_ref'] = res_ref

    metadata['distributed'] = self.options['distributed']

    # We may not know the pathname yet, so we have to use name for now, instead of abs_name.
    if self._static_mode:
        var_rel2meta = self._static_var_rel2meta
        var_rel_names = self._static_var_rel_names
    else:
        var_rel2meta = self._var_rel2meta
        var_rel_names = self._var_rel_names

    # Disallow dupes
    if name in var_rel2meta:
        msg = "Variable name '{}' already exists.".format(name)
        raise ValueError(msg)

    var_rel2meta[name] = metadata
    var_rel_names['output'].append(name)

    return metadata
def view_tree(*args, **kwargs):
    """
    Deprecated alias for view_model, kept for backwards compatibility.

    Parameters
    ----------
    *args : list
        Positional args forwarded to view_model.
    **kwargs : dict
        Keyword args forwarded to view_model.
    """
    warn_deprecation("view_tree is deprecated. Please switch to view_model.")
    view_model(*args, **kwargs)
def default_surrogate(self, value):
    """
    Set the default surrogate for this MetaModel (deprecated attribute access).
    """
    msg = ("The 'default_surrogate' attribute provides backwards compatibility "
           "with earlier version of OpenMDAO; use options['default_surrogate'] "
           "instead.")
    warn_deprecation(msg)
    self.options['default_surrogate'] = value