def factorial(*args):
    """
    Deprecated wrapper around ``scipy.special.factorial``.

    Issues a deprecation warning, then forwards all positional arguments
    unchanged to the scipy implementation.
    """
    warn_deprecation("The 'factorial' function is deprecated. "
                     "It is no longer supported for SciPy versions >= 1.5.")
    result = scipy.special.factorial(*args)
    return result
def __getitem__(self, name): """ Get an option from the dict or declared default. Parameters ---------- name : str name of the option. Returns ------- value : - value of the option. """ # If the option has been set in this system, return the set value try: meta = self._dict[name] if meta['deprecation'] is not None and name not in self._deprecation_warning_issued: warn_deprecation(meta['deprecation']) self._deprecation_warning_issued.append(name) if meta['has_been_set']: return meta['val'] else: self._raise("Option '{}' is required but has not been set.".format(name)) except KeyError: self._raise("Option '{}' cannot be found".format(name), exc_type=KeyError)
def check_mpi_env():
    """
    Determine if the environment variable governing MPI usage is set.

    Returns
    -------
    bool
        True if MPI is required, False if it's to be skipped, None if not set.
    """
    if 'OPENMDAO_REQUIRE_MPI' in os.environ:
        warn_deprecation("Set OPENMDAO_USE_MPI instead of OPENMDAO_REQUIRE_MPI.")

    # OPENMDAO_USE_MPI takes precedence; fall back to the deprecated variable.
    setting = os.environ.get('OPENMDAO_USE_MPI',
                             os.environ.get('OPENMDAO_REQUIRE_MPI', None))

    # Any "truthy" spelling forces MPI on: the run will fail immediately
    # if the MPI import fails.
    if str(setting).lower() in ('always', '1', 'true', 'yes', 'y', 'on'):
        return True

    # Any other explicit setting disables MPI; no import is attempted.
    if setting is not None:
        return False

    # Unset: the import will be attempted, with no warning if it fails.
    return None
def _handle_deprecation(self, name, meta):
    """
    Update the warning counter and do name translation of deprecated variable if needed.

    Parameters
    ----------
    name : str
        Name of the deprecated variable.
    meta : dict
        Metadata dictionary corresponding to the deprecated variable.

    Returns
    -------
    str
        The variable name, updated to the non-deprecated name if found.
    dict
        Metadata dictionary corresponding to either the original variable or to the
        non-deprecated varsion if found.
    """
    dep_msg, alias, pending = meta['deprecation']

    if pending:
        warn_deprecation(dep_msg)
        # Only warn the first time; suppress future warnings for this variable.
        meta['deprecation'][2] = False

    if alias:
        # Redirect to the metadata of the replacement (non-deprecated) option.
        if alias in self._dict:
            meta = self._dict[alias]
        else:
            self._raise(f"Can't find aliased option '{alias}' for deprecated option "
                        f"'{name}'.", exc_type=KeyError)
        name = alias

    return name, meta
def _n2_cmd(options, user_args):
    """
    Process command line args and call n2 on the specified file.

    Parameters
    ----------
    options : argparse Namespace
        Command line options.
    user_args : list of str
        Command line options after '--' (if any).  Passed to user script.
    """
    filename = _to_filename(options.file[0])

    if filename.endswith('.py'):
        # the file is a python script, run as a post_setup hook
        def _noraise(prob):
            # Suppress connection errors so a partially-broken model can still be viewed.
            prob.model._raise_connection_errors = False

        if options.use_declare_partial_info:
            warn_deprecation("'--use_declare_partial_info' is now the"
                             " default and the option is ignored.")

        def _viewmod(prob):
            # Generate the diagram from the fully set-up Problem.
            n2(prob, outfile=options.outfile, show_browser=not options.no_browser,
               title=options.title, embeddable=options.embeddable)

        # Disable connection errors before setup, then render the diagram once
        # final_setup completes; 'exit=True' stops the script afterwards.
        hooks._register_hook('setup', 'Problem', pre=_noraise, ncalls=1)
        hooks._register_hook('final_setup', 'Problem', post=_viewmod, exit=True)

        ignore_errors(True)
        _load_and_exec(options.file[0], user_args)
    else:
        # assume the file is a recording, run standalone
        n2(filename, outfile=options.outfile, title=options.title,
           show_browser=not options.no_browser, embeddable=options.embeddable)
def __setitem__(self, name, value):
    """
    Set an option in the local dictionary.

    Parameters
    ----------
    name : str
        name of the option.
    value : -
        value of the option to be value- and type-checked if declared.
    """
    try:
        meta = self._dict[name]
    except KeyError:
        # The key must have been declared.
        self._raise("Option '{}' cannot be set because it has not been "
                    "declared.".format(name), exc_type=KeyError)

    # Emit the deprecation notice only on the first access to this option.
    if meta['deprecation'] is not None and name not in self._deprecation_warning_issued:
        warn_deprecation(meta['deprecation'])
        self._deprecation_warning_issued.append(name)

    if self._read_only:
        self._raise("Tried to set read-only option '{}'.".format(name), exc_type=KeyError)

    # Validate against declared values/types/bounds before storing.
    self._assert_valid(name, value)

    meta['val'] = value
    meta['has_been_set'] = True
def add_approximation(self, abs_key, system, kwargs, vector=None):
    """
    Use this approximation scheme to approximate the derivative d(of)/d(wrt).

    Parameters
    ----------
    abs_key : tuple(str,str)
        Absolute name pairing of (of, wrt) for the derivative.
    system : System
        Containing System.
    kwargs : dict
        Additional keyword arguments, to be interpreted by sub-classes.
    vector : ndarray or None
        Direction for difference when using directional derivatives.

    Raises
    ------
    ValueError
        If 'form' or 'step_calc' is invalid, or if 'directional' is combined
        with the unsupported 'rel_element' step_calc.
    """
    options = self.DEFAULT_OPTIONS.copy()
    options.update(kwargs)

    if options['order'] is None:
        # User-submitted options for method=='fd' are all checked here.
        form = options['form']
        if form in DEFAULT_ORDER:
            options['order'] = DEFAULT_ORDER[options['form']]
        else:
            raise ValueError("{}: '{}' is not a valid form of finite difference; must be "
                             "one of {}".format(system.msginfo, form,
                                                list(DEFAULT_ORDER.keys())))

        step_calc = options['step_calc']
        step_calcs = ['abs', 'rel', 'rel_legacy', 'rel_avg', 'rel_element']
        if step_calc not in step_calcs:
            raise ValueError(f"{system.msginfo}: '{step_calc}' is not a valid setting for "
                             f"step_calc; must be one of {step_calcs}.")
        elif options['directional'] and step_calc == 'rel_element':
            # Fixed message punctuation: the period belongs after the quoted
            # option value, not inside the quotes.
            raise ValueError(f"{system.msginfo}: Option 'directional' is not supported when "
                             "'step_calc' is set to 'rel_element'.")
        elif step_calc == 'rel':
            warn_deprecation("When using 'rel' as the step_calc, the fd stepsize is currently "
                             "scaled by the norm of the vector variable. This is not ideal for"
                             " larger vectors, and this behavior is being changed in "
                             "OpenMDAO 3.12.0. To preserve the older way of doing this "
                             "calculation, set step_calc to 'rel_legacy'.")

    options['vector'] = vector
    wrt = abs_key[1]
    if wrt in self._wrt_meta:
        # Merge into existing per-wrt metadata rather than replacing it.
        self._wrt_meta[wrt].update(options)
    else:
        self._wrt_meta[wrt] = options

    self._reset()  # force later regen of approx_groups
def record_metadata(self, recording_requester):
    """
    Route the record_metadata call to the proper method.

    Parameters
    ----------
    recording_requester : object
        The object that would like to record its metadata.
    """
    # Metadata recording is automatic now; this entry point only warns.
    message = ("The 'record_metadata' function is deprecated. "
               "All system and solver options are recorded automatically.")
    warn_deprecation(message)
def record_system_options(problem):
    """
    Record the system options for all systems in the model.

    Deprecated alias that forwards to ``record_model_options``.

    Parameters
    ----------
    problem : Problem
        The problem for which all its systems' options are to be recorded.
    """
    warn_deprecation("The 'record_system_options' function is deprecated. "
                     "Use 'record_model_options' instead.")
    # Forward to the supported implementation.
    record_model_options(problem)
def record_metadata(self, recording_requester):
    """
    Call record_metadata for all recorders.

    Parameters
    ----------
    recording_requester : object
        The object that needs its metadata recorded.
    """
    # No-op apart from the deprecation notice; option recording is automatic.
    warn_deprecation(
        "The 'record_metadata' function is deprecated. "
        "All system and solver options are recorded automatically.")
def system_options(self):
    """
    Provide '_system_options' property for backwards compatibility.

    Returns
    -------
    dict
        reference to the _system_options attribute.
    """
    # Deprecated accessor; hands back the underlying attribute unchanged.
    warn_deprecation("The system_options attribute is deprecated. "
                     "Use `list_model_options` instead.")
    opts = self._system_options
    return opts
def _update_new_style(src_indices, new_style, prefix=""):
    """
    Return whether src_indices uses new-style (NumPy) indexing, warning otherwise.

    A tuple containing an Ellipsis or a slice is treated as new-style even when
    the caller did not flag it as such.
    """
    if new_style:
        return new_style

    # Auto-detect new-style indexing from the presence of slices or ellipsis.
    if isinstance(src_indices, tuple) and any(part is ... or isinstance(part, slice)
                                              for part in src_indices):
        return True

    warn_deprecation(f"{prefix}: 'src_indices={src_indices}' is specified in"
                     " a deprecated format. In a future release, 'src_indices'"
                     " will be expected to use NumPy array indexing.")
    return new_style
def system_metadata(self):
    """
    Provide 'system_metadata' property for backwards compatibility.

    Returns
    -------
    dict
        reference to the '_system_options' attribute.
    """
    # Deprecated accessor kept for older case-reader client code.
    warn_deprecation(
        "The BaseCaseReader.system_metadata attribute is deprecated. "
        "Use `list_model_options` instead.")
    opts = self._system_options
    return opts
def __init__(self, **kwargs):
    """
    Instantiate DemuxComp and populate private members.
    """
    super().__init__(**kwargs)

    # Partials for this component are checked elsewhere, so skip check_partials.
    self._no_check_partials = True

    # Per-variable metadata and output-name bookkeeping, filled in during setup.
    self._vars = {}
    self._output_names = {}

    warn_deprecation("DemuxComp is being deprecated. This same functionality can be achieved "
                     "directly in the connect/promotes indices arg using om.slicer.")
def get_conversion(old_units, new_units):
    """
    Return conversion factor and offset between old and new units (deprecated).

    Parameters
    ----------
    old_units : str
        Original units as a string.
    new_units : str
        New units to return the value in.

    Returns
    -------
    (float, float)
        Conversion factor and offset.
    """
    warn_deprecation("'get_conversion' has been deprecated. Use "
                     "'unit_conversion' instead.")
    # Delegate to the supported implementation.
    return unit_conversion(old_units, new_units)
def simple_warning(msg, category=UserWarning, stacklevel=2):
    """
    Display a simple warning message without the annoying extra line showing the warning call.

    Parameters
    ----------
    msg : str
        The warning message.
    category : class
        The warning class.
    stacklevel : int
        Number of levels up the stack to identify as the warning location.
    """
    warn_deprecation('simple_warning is deprecated. '
                     'Use openmdao.utils.om_warnings.issue_warning instead.')
    # Temporarily swap in the terse formatter, restoring the previous one even
    # if warnings.warn raises (e.g. when a filter turns the warning into an error).
    saved_formatter = warnings.formatwarning
    warnings.formatwarning = _warn_simple_format
    try:
        warnings.warn(msg, category, stacklevel)
    finally:
        warnings.formatwarning = saved_formatter
def assert_rel_error(test_case, actual, desired, tolerance=1e-15):
    """
    Check relative error.

    Determine that the relative error between `actual` and `desired`
    is within `tolerance`. If `desired` is zero, then use absolute error.

    Parameters
    ----------
    test_case : class:`unittest.TestCase`
        TestCase instance used for assertions.
    actual : float, array-like, dict
        The value from the test.
    desired : float, array-like, dict
        The value expected.
    tolerance : float
        Maximum relative error ``(actual - desired) / desired``.

    Returns
    -------
    float
        The error.
    """
    warn_deprecation("'assert_rel_error' has been deprecated. Use "
                     "'assert_near_equal' instead.")

    if isinstance(actual, dict) and isinstance(desired, dict):
        # Dict inputs are compared key by key; both must have exactly the same keys.
        actual_keys = set(actual.keys())
        desired_keys = set(desired.keys())

        if actual_keys.symmetric_difference(desired_keys):
            msg = 'Actual and desired keys differ. Actual extra keys: {}, Desired extra keys: {}'
            actual_extra = actual_keys.difference(desired_keys)
            desired_extra = desired_keys.difference(actual_keys)
            test_case.fail(msg.format(actual_extra, desired_extra))

        error = 0.

        for key in actual_keys:
            try:
                # Recurse per entry; the overall error is the worst per-key error.
                new_error = assert_rel_error(test_case, actual[key], desired[key], tolerance)
                error = max(error, new_error)
            except test_case.failureException as exception:
                # Prefix the failing key so nested failures are easy to locate.
                msg = '{}: '.format(key) + str(exception)
                raise test_case.failureException(msg) from None

    elif isinstance(actual, float) and isinstance(desired, float):
        if isnan(actual) and not isnan(desired):
            test_case.fail('actual nan, desired %s' % desired)
        if desired != 0:
            error = (actual - desired) / desired
        else:
            # Desired is zero: fall back to absolute error.
            error = actual
        if abs(error) > tolerance:
            test_case.fail('actual %s, desired %s, rel error %s, tolerance %s'
                           % (actual, desired, error, tolerance))

    # array values
    else:
        actual = np.atleast_1d(actual)
        desired = np.atleast_1d(desired)
        if actual.shape != desired.shape:
            test_case.fail(
                'actual and desired have differing shapes.'
                ' actual {}, desired {}'.format(actual.shape, desired.shape))
        # NaNs must appear at identical positions in both arrays.
        if not np.all(np.isnan(actual) == np.isnan(desired)):
            if actual.size == 1 and desired.size == 1:
                test_case.fail('actual %s, desired %s' % (actual, desired))
            else:
                test_case.fail('actual and desired values have non-matching nan'
                               ' values')
        if np.linalg.norm(desired) == 0:
            # Zero-norm desired: use the norm of actual as an absolute error.
            error = np.linalg.norm(actual)
        else:
            error = np.linalg.norm(actual - desired) / np.linalg.norm(desired)

        if abs(error) > tolerance:
            if actual.size < 10 and desired.size < 10:
                test_case.fail('actual %s, desired %s, rel error %s, tolerance %s'
                               % (actual, desired, error, tolerance))
            else:
                test_case.fail('arrays do not match, rel error %.3e > tol (%.3e)'
                               % (error, tolerance))

    return error
def n2(data_source, outfile=_default_n2_filename, case_id=None, show_browser=True, embeddable=False, title=None, use_declare_partial_info=False, display_in_notebook=True): """ Generate an HTML file containing a tree viewer. Optionally opens a web browser to view the file. Parameters ---------- data_source : <Problem> or str The Problem or case recorder database containing the model or model data. outfile : str, optional The name of the final output file. case_id : int, str, or None Case name or index of case in SQL file if data_source is a database. show_browser : bool, optional If True, pop up the system default web browser to view the generated html file. Defaults to True. embeddable : bool, optional If True, gives a single HTML file that doesn't have the <html>, <DOCTYPE>, <body> and <head> tags. If False, gives a single, standalone HTML file for viewing. title : str, optional The title for the diagram. Used in the HTML title. use_declare_partial_info : ignored This option is no longer used because it is now always true. Still present for backwards compatibility. display_in_notebook : bool, optional If True, display the N2 diagram in the notebook, if this is called from a notebook. Defaults to True. 
""" # grab the model viewer data model_data = _get_viewer_data(data_source, case_id=case_id) # if MPI is active only display one copy of the viewer if MPI and MPI.COMM_WORLD.rank != 0: return options = {} model_data['options'] = options if use_declare_partial_info: warn_deprecation("'use_declare_partial_info' is now the" " default and the option is ignored.") import openmdao openmdao_dir = os.path.dirname(inspect.getfile(openmdao)) vis_dir = os.path.join(openmdao_dir, "visualization/n2_viewer") if title: title = f"OpenMDAO Model Hierarchy and N2 diagram: {title}" else: title = "OpenMDAO Model Hierarchy and N2 diagram" html_vars = { 'title': title, 'embeddable': "embedded-n2" if embeddable else "non-embedded-n2", 'openmdao_version': openmdao_version, 'model_data': model_data } HtmlPreprocessor(os.path.join(vis_dir, "index.html"), outfile, allow_overwrite=True, var_dict=html_vars, json_dumps_default=default_noraise, verbose=False).run() if notebook: if display_in_notebook: # display in Jupyter Notebook outfile = os.path.relpath(outfile) if not colab: display(IFrame(src=outfile, width="100%", height=700)) else: display(HTML(outfile)) elif show_browser: # open it up in the browser from openmdao.utils.webview import webview webview(outfile)
def n2(data_source, outfile='n2.html', case_id=None, show_browser=True, embeddable=False,
       title=None, use_declare_partial_info=False):
    """
    Generate an HTML file containing a tree viewer.

    Optionally opens a web browser to view the file.

    Parameters
    ----------
    data_source : <Problem> or str
        The Problem or case recorder database containing the model or model data.
    outfile : str, optional
        The name of the final output file.
    case_id : int, str, or None
        Case name or index of case in SQL file if data_source is a database.
    show_browser : bool, optional
        If True, pop up the system default web browser to view the generated html file.
        Defaults to True.
    embeddable : bool, optional
        If True, gives a single HTML file that doesn't have the <html>, <DOCTYPE>,
        <body> and <head> tags. If False, gives a single, standalone HTML file for viewing.
    title : str, optional
        The title for the diagram. Used in the HTML title.
    use_declare_partial_info : ignored
        This option is no longer used because it is now always true.
        Still present for backwards compatibility.
    """
    # grab the model viewer data
    model_data = _get_viewer_data(data_source, case_id=case_id)

    # if MPI is active only display one copy of the viewer
    if MPI and MPI.COMM_WORLD.rank != 0:
        return

    options = {}
    model_data['options'] = options

    if use_declare_partial_info:
        warn_deprecation("'use_declare_partial_info' is now the"
                         " default and the option is ignored.")

    # Compress and base64-encode the model data so it can be embedded in the HTML.
    raw_data = json.dumps(model_data, default=default_noraise).encode('utf8')
    b64_data = str(base64.b64encode(zlib.compress(raw_data)).decode("ascii"))
    model_data = 'var compressedModel = "%s";' % b64_data

    # Locate the n2 viewer assets relative to the installed openmdao package.
    import openmdao
    openmdao_dir = os.path.dirname(inspect.getfile(openmdao))
    vis_dir = os.path.join(openmdao_dir, "visualization/n2_viewer")
    libs_dir = os.path.join(vis_dir, "libs")
    src_dir = os.path.join(vis_dir, "src")
    style_dir = os.path.join(vis_dir, "style")
    assets_dir = os.path.join(vis_dir, "assets")

    # grab the libraries, src and style
    lib_dct = {
        'd3': 'd3.v5.min',
        'awesomplete': 'awesomplete',
        'vk_beautify': 'vkBeautify',
        'pako_inflate': 'pako_inflate.min',
        'json5': 'json5_2.2.0.min'
    }
    libs = read_files(lib_dct.values(), libs_dir, 'js')
    src_names = \
        'utils', \
        'SymbolType', \
        'N2TreeNode', \
        'ModelData', \
        'N2Style', \
        'N2Window', \
        'N2Layout', \
        'N2MatrixCell', \
        'N2Legend', \
        'N2Matrix', \
        'N2Arrow', \
        'N2Search', \
        'N2Toolbar', \
        'N2Diagram', \
        'NodeInfo', \
        'N2UserInterface', \
        'defaults', \
        'ptN2'
    srcs = read_files(src_names, src_dir, 'js')
    style_names = \
        'window', \
        'partition_tree', \
        'n2toolbar-icons', \
        'toolbar', \
        'legend', \
        'awesomplete'
    styles = read_files((style_names), style_dir, 'css')

    # Binary assets (font, images) are base64-embedded as well.
    with open(os.path.join(style_dir, "n2toolbar-icons-font.woff"), "rb") as f:
        encoded_font = str(base64.b64encode(f.read()).decode("ascii"))

    with open(os.path.join(style_dir, "logo_png.b64"), "r") as f:
        logo_png = str(f.read())

    with open(os.path.join(assets_dir, "spinner.png"), "rb") as f:
        waiting_icon = str(base64.b64encode(f.read()).decode("ascii"))

    with open(os.path.join(assets_dir,
                           "n2toolbar_screenshot_png.b64"), "r") as f:
        n2toolbar_png = str(f.read())

    if title:
        title = "OpenMDAO Model Hierarchy and N2 diagram: %s" % title
    else:
        title = "OpenMDAO Model Hierarchy and N2 diagram"

    # Error-handling script goes into the document head so it loads first.
    src_names = ('N2ErrorHandling', )
    head_srcs = read_files(src_names, src_dir, 'js')

    h = DiagramWriter(filename=os.path.join(vis_dir, "index.html"),
                      title=title,
                      styles=styles, embeddable=embeddable, head_srcs=head_srcs)

    if (embeddable):
        h.insert("non-embedded-n2", "embedded-n2")

    # put all style and JS into index
    h.insert('{{n2toolbar-icons}}', encoded_font)
    h.insert('{{logo_png}}', logo_png)
    h.insert('{{waiting_icon}}', waiting_icon)
    h.insert('{{n2toolbar_png}}', n2toolbar_png)
    h.insert('{{om_version}}', openmdao_version)

    for k, v in lib_dct.items():
        h.insert('{{{}_lib}}'.format(k), write_script(libs[v], indent=_IND))

    for name, code in srcs.items():
        h.insert('{{{}_lib}}'.format(name.lower()), write_script(code, indent=_IND))

    h.insert('{{model_data}}', write_script(model_data, indent=_IND))

    # Write output file
    h.write(outfile)

    if notebook:
        # display in Jupyter Notebook
        outfile = os.path.relpath(outfile)
        if not colab:
            display(IFrame(src=outfile, width="100%", height=700))
        else:
            display(HTML(outfile))
    elif show_browser:
        # open it up in the browser
        from openmdao.utils.webview import webview
        webview(outfile)
def _compute_totals(self, of=None, wrt=None, return_format='flat_dict', global_names=None,
                    use_abs_names=True):
    """
    Compute derivatives of desired quantities with respect to desired inputs.

    All derivatives are returned using driver scaling.

    Parameters
    ----------
    of : list of variable name str or None
        Variables whose derivatives will be computed. Default is None, which
        uses the driver's objectives and constraints.
    wrt : list of variable name str or None
        Variables with respect to which the derivatives will be computed.
        Default is None, which uses the driver's desvars.
    return_format : str
        Format to return the derivatives. Default is a 'flat_dict', which
        returns them in a dictionary whose keys are tuples of form (of, wrt). For
        the scipy optimizer, 'array' is also supported.
    global_names : bool
        Deprecated.  Use 'use_abs_names' instead.
    use_abs_names : bool
        Set to True when passing in absolute names to skip some translation steps.

    Returns
    -------
    derivs : object
        Derivatives in form requested by 'return_format'.
    """
    problem = self._problem()
    total_jac = self._total_jac
    # Debug printing only on rank 0 (or when MPI is inactive).
    debug_print = 'totals' in self.options['debug_print'] and (not MPI or
                                                               problem.comm.rank == 0)

    if debug_print:
        header = 'Driver total derivatives for iteration: ' + str(self.iter_count)
        print(header)
        print(len(header) * '-' + '\n')

    if global_names is not None:
        warn_deprecation("'global_names' is deprecated in calls to _compute_totals. "
                         "Use 'use_abs_names' instead.")
        use_abs_names = global_names

    if problem.model._owns_approx_jac:
        # Approximated total jacobian path.
        self._recording_iter.push(('_compute_totals_approx', 0))

        try:
            if total_jac is None:
                total_jac = _TotalJacInfo(problem, of, wrt, use_abs_names, return_format,
                                          approx=True, debug_print=debug_print)

                # Don't cache linear constraint jacobian
                if not total_jac.has_lin_cons:
                    self._total_jac = total_jac

                totals = total_jac.compute_totals_approx(initialize=True)
            else:
                totals = total_jac.compute_totals_approx()
        finally:
            # Always pop the recording stack entry, even on error.
            self._recording_iter.pop()
    else:
        if total_jac is None:
            total_jac = _TotalJacInfo(problem, of, wrt, use_abs_names, return_format,
                                      debug_print=debug_print)

            # don't cache linear constraint jacobian
            if not total_jac.has_lin_cons:
                self._total_jac = total_jac

        self._recording_iter.push(('_compute_totals', 0))

        try:
            totals = total_jac.compute_totals()
        finally:
            self._recording_iter.pop()

    if self._rec_mgr._recorders and self.recording_options['record_derivatives']:
        metadata = create_local_meta(self._get_name())
        total_jac.record_derivatives(self, metadata)

    return totals
def _setup_expressions(self):
    """
    Set up the expressions.

    This is called during setup_procs and after each call to "add_expr" from configure.
    """
    global _not_complex_safe

    exprs = self._exprs
    kwargs = self._kwargs

    units = self.options['units']
    shape = self.options['shape']
    shape_by_conn = self.options['shape_by_conn']

    # Only warn once about 'value' vs 'val' regardless of how many vars use it.
    warned = False

    if shape is not None and shape_by_conn:
        raise RuntimeError(f"{self.msginfo}: Can't set both shape and shape_by_conn.")

    outs = set()
    allvars = set()

    # For each expression, collect (output names on the LHS, (all names, function names)).
    self._exprs_info = exprs_info = [(self._parse_for_out_vars(expr.split('=', 1)[0]),
                                      self._parse_for_names(expr)) for expr in exprs]

    self._requires_fd = {}

    # find all of the variables and which ones are outputs
    for i, (onames, names) in enumerate(exprs_info):
        outs.update(onames)
        allvars.update(names[0])
        # Expressions using non-complex-safe functions must be finite differenced.
        if _not_complex_safe.intersection(names[1]):
            for o in onames:
                self._requires_fd[o] = names

    if self._requires_fd:
        inps = []
        for out, (rhsvars, funcs) in self._requires_fd.items():
            # Inputs are RHS names that are not themselves outputs.
            iset = rhsvars.difference(outs)
            self._requires_fd[out] = (iset, funcs)
            inps.extend(iset)
        self._no_check_partials = False
        self.set_check_partial_options(wrt=inps, method='fd')

    kwargs2 = {}
    init_vals = {}

    # make sure all kwargs are legit
    for arg, val in kwargs.items():
        if arg not in allvars:
            raise RuntimeError("%s: arg '%s' in call to ExecComp() "
                               "does not refer to any variable in the "
                               "expressions %s" % (self.msginfo, arg, exprs))

        if isinstance(val, dict):
            diff = set(val.keys()) - _allowed_meta
            if diff:
                raise RuntimeError("%s: the following metadata names were not "
                                   "recognized for variable '%s': %s" %
                                   (self.msginfo, arg, sorted(diff)))

            if 'val' in val and 'value' in val:
                raise RuntimeError(f"{self.msginfo}: 'val' and 'value' at the same time, use "
                                   "'val'.")
            elif 'value' in val and not warned:
                warn_deprecation(f"{self.msginfo}: 'value' will be deprecated in 4.0. Please "
                                 "use 'val' in the future.")

            # Normalize the deprecated 'value' key to 'val'.
            if 'value' in val:
                val['val'] = val.pop('value')
                warned = True

            kwargs2[arg] = val.copy()

            if units is not None:
                # Per-variable units must not conflict with component-wide units.
                if 'units' in val and val['units'] != units:
                    raise RuntimeError("%s: units of '%s' have been specified for "
                                       "variable '%s', but units of '%s' have been "
                                       "specified for the entire component." %
                                       (self.msginfo, val['units'], arg, units))
                else:
                    kwargs2[arg]['units'] = units

            if shape is not None:
                # Per-variable shape/val must agree with the component-wide shape.
                if 'shape' in val and val['shape'] != shape:
                    raise RuntimeError("%s: shape of %s has been specified for "
                                       "variable '%s', but shape of %s has been "
                                       "specified for the entire component." %
                                       (self.msginfo, val['shape'], arg, shape))
                elif 'val' in val and np.atleast_1d(val['val']).shape != shape:
                    raise RuntimeError("%s: value of shape %s has been specified for "
                                       "variable '%s', but shape of %s has been "
                                       "specified for the entire component." %
                                       (self.msginfo, np.atleast_1d(val['val']).shape,
                                        arg, shape))
                else:
                    init_vals[arg] = np.ones(shape)

            if 'val' in val:
                # Record the initial value and keep it out of the add_input/output kwargs.
                init_vals[arg] = val['val']
                del kwargs2[arg]['val']

            if shape_by_conn or 'shape_by_conn' in val or 'copy_shape' in val:
                if val.get('shape') is not None or val.get('val') is not None:
                    raise RuntimeError(f"{self.msginfo}: Can't set 'shape' or 'val' for "
                                       f"variable '{arg}' along with 'copy_shape' or "
                                       "'shape_by_conn'.")

            if 'shape' in val:
                if arg not in init_vals:
                    init_vals[arg] = np.ones(val['shape'])
                elif np.atleast_1d(init_vals[arg]).shape != val['shape']:
                    raise RuntimeError("%s: shape of %s has been specified for variable "
                                       "'%s', but a value of shape %s has been provided."
                                       % (self.msginfo, str(val['shape']), arg,
                                          str(np.atleast_1d(init_vals[arg]).shape)))
                del kwargs2[arg]['shape']
        else:
            # Bare (non-dict) kwarg value is just the initial value.
            init_vals[arg] = val

    if self._static_mode:
        var_rel2meta = self._static_var_rel2meta
    else:
        var_rel2meta = self._var_rel2meta

    for var in sorted(allvars):
        meta = kwargs2.get(var, {
            'units': units,
            'shape': shape,
            'shape_by_conn': shape_by_conn})

        # if user supplied an initial value, use it, otherwise set to 1.0
        if var in init_vals:
            val = init_vals[var]
        else:
            val = 1.0

        if var in var_rel2meta:
            # Input/Output already exists, but we may be setting defaults for the first time.
            # Note that there is only one submitted dictionary of defaults.
            current_meta = var_rel2meta[var]

            for kname, kvalue in meta.items():
                if kvalue is not None:
                    current_meta[kname] = kvalue

            new_val = kwargs[var].get('val')
            if new_val is not None:
                current_meta['val'] = new_val
        else:
            # new input and/or output.
            if var in outs:
                current_meta = self.add_output(var, val, **meta)
            else:
                current_meta = self.add_input(var, val, **meta)

        if var not in init_vals:
            init_vals[var] = current_meta['val']

    self._codes = self._compile_exprs(self._exprs)
def add_output(self, name, val=1.0, shape=None, units=None, res_units=None, desc='',
               lower=None, upper=None, ref=None, ref0=None, res_ref=None, tags=None,
               shape_by_conn=False, copy_shape=None, distributed=None):
    """
    Add an independent variable to this component.

    Parameters
    ----------
    name : str
        Name of the variable in this component's namespace.
    val : float or list or tuple or ndarray
        The initial value of the variable being added in user-defined units.
        Default is 1.0.
    shape : int or tuple or list or None
        Shape of this variable, only required if val is not an array.
        Default is None.
    units : str or None
        Units in which the output variables will be provided to the component
        during execution. Default is None, which means it has no units.
    res_units : None
        This argument is deprecated because it was unused.
    desc : str
        Description of the variable.
    lower : None
        This argument is deprecated because it was unused.
    upper : None
        This argument is deprecated because it was unused.
    ref : None
        This argument is deprecated because it was unused.
    ref0 : None
        This argument is deprecated because it was unused.
    res_ref : None
        This argument is deprecated because it was unused.
    tags : str or list of strs
        User defined tags that can be used to filter what gets listed when calling
        list_outputs.
    shape_by_conn : bool
        If True, shape this output to match its connected input(s).
    copy_shape : str or None
        If a str, that str is the name of a variable. Shape this output to match that of
        the named variable.
    distributed : bool
        If True, this variable is a distributed variable, so it can have different
        sizes/values across MPI processes.

    Returns
    -------
    dict
        Metadata for added variable.
    """
    # Each deprecated argument gets its own warning so the user sees exactly
    # which ones were passed.
    if res_units is not None:
        warn_deprecation(f"{self.msginfo}: The 'res_units' argument was used when adding "
                         f"output '{name}'. This argument has been deprecated and will be "
                         "removed in a future version.")
    if lower is not None:
        warn_deprecation(f"{self.msginfo}: The 'lower' argument was used when adding "
                         f"output '{name}'. This argument has been deprecated and will be "
                         "removed in a future version.")
    if upper is not None:
        warn_deprecation(f"{self.msginfo}: The 'upper' argument was used when adding "
                         f"output '{name}'. This argument has been deprecated and will be "
                         "removed in a future version.")
    if ref0 is not None:
        warn_deprecation(f"{self.msginfo}: The 'ref0' argument was used when adding "
                         f"output '{name}'. This argument has been deprecated and will be "
                         "removed in a future version.")
    if res_ref is not None:
        warn_deprecation(f"{self.msginfo}: The 'res_ref' argument was used when adding "
                         f"output '{name}'. This argument has been deprecated and will be "
                         "removed in a future version.")
    if ref is not None:
        warn_deprecation(f"{self.msginfo}: The 'ref' argument was used when adding "
                         f"output '{name}'. This argument has been deprecated and will be "
                         "removed in a future version.")

    # Deprecated scaling args are ignored: unity scaling is always used.
    ref = 1.0
    ref0 = 0.0

    if res_ref is None:
        res_ref = ref

    if tags is None:
        tags = {'indep_var'}
    else:
        # Always include the 'indep_var' tag in addition to any user tags.
        tags = make_set(tags) | {'indep_var'}

    kwargs = {'shape': shape, 'units': units, 'res_units': res_units, 'desc': desc,
              'lower': lower, 'upper': upper, 'ref': ref, 'ref0': ref0, 'res_ref': res_ref,
              'tags': tags, 'shape_by_conn': shape_by_conn, 'copy_shape': copy_shape,
              'distributed': distributed,
              }
    return super().add_output(name, val, **kwargs)
def declare(self, name, default=_UNDEFINED, values=None, types=None, desc='', upper=None,
            lower=None, check_valid=None, allow_none=False, recordable=True,
            deprecation=None):
    r"""
    Declare an option.

    The value of the option must satisfy the following:
    1. If values only was given when declaring, value must be in values.
    2. If types only was given when declaring, value must satisfy isinstance(value, types).
    3. It is an error if both values and types are given.

    Parameters
    ----------
    name : str
        Name of the option.
    default : object or Null
        Optional default value that must be valid under the above 3 conditions.
    values : set or list or tuple or None
        Optional list of acceptable option values.
    types : type or tuple of types or None
        Optional type or list of acceptable option types.
    desc : str
        Optional description of the option.
    upper : float or None
        Maximum allowable value.
    lower : float or None
        Minimum allowable value.
    check_valid : function or None
        User-supplied function with arguments (name, value) that raises an exception
        if the value is not valid.
    allow_none : bool
        If True, allow None as a value regardless of values or types.
    recordable : bool
        If True, add to recorder.
    deprecation : str or tuple or None
        If None, it is not deprecated. If a str, use as a DeprecationWarning
        during __setitem__ and __getitem__.  If a tuple of the form (msg, new_name),
        display msg as with str, and forward any __setitem__/__getitem__ to new_name.
    """
    # Option names should be valid python names; warn (future error) otherwise.
    match = namecheck_rgx.match(name)
    if match is None or match.group() != name:
        warn_deprecation(f"'{name}' is not a valid python name and will become an invalid "
                         "option name in a future release. You can prevent this warning (and "
                         "future exceptions) by declaring this option using a valid python "
                         "name.")

    if values is not None and not isinstance(values, (set, list, tuple)):
        self._raise(f"In declaration of option '{name}', the 'values' arg must be of type None,"
                    f" list, or tuple - not {values}.", exc_type=TypeError)

    if types is not None and not isinstance(types, (type, set, list, tuple)):
        self._raise(f"In declaration of option '{name}', the 'types' arg must be None, a type "
                    f"or a tuple - not {types}.", exc_type=TypeError)

    if types is not None and values is not None:
        self._raise(f"'types' and 'values' were both specified for option '{name}'.")

    if types is bool:
        # Boolean options are constrained to exactly True or False.
        values = (True, False)

    if not recordable:
        self._all_recordable = False

    default_provided = default is not _UNDEFINED

    if default_provided and default is None:
        # specifying default=None implies allow_none
        allow_none = True

    alias = None
    if deprecation is not None:
        if isinstance(deprecation, (list, tuple)):
            if len(deprecation) != 2:
                self._raise("deprecation must be None, str, or a tuple or list containing "
                            "(str, str).", RuntimeError)
            dep, alias = deprecation
            # [message, alias, display warning (becomes False after first display)]
            deprecation = [dep, alias, True]
        else:
            deprecation = [deprecation, None, True]

    self._dict[name] = {
        'val': default,
        'values': values,
        'types': types,
        'desc': desc,
        'upper': upper,
        'lower': lower,
        'check_valid': check_valid,
        'has_been_set': default_provided,
        'allow_none': allow_none,
        'recordable': recordable,
        'deprecation': deprecation,
    }

    # If a default is given, check for validity.
    if default_provided:
        self._assert_valid(name, default)
def __init__(self, method="slinear", points=None, values=None, x_interp=None,
             extrapolate=False, num_cp=None, **kwargs):
    """
    Initialize an InterpND object.

    This object can be setup and used to interpolate on a curve or multi-dimensional table.
    It can also be used to setup an interpolating spline that can be evaluated at fixed
    locations.

    For interpolation, specify values and points.

    For spline evaluation, specifiy x_interp and either points or num_cp.

    Parameters
    ----------
    method : str
        Name of the interpolation method; must be a key of INTERP_METHODS.
    points : ndarray or list of ndarray or None
        The points defining each table dimension; each must be 1-D and strictly ascending.
    values : array-like or None
        Table values; required for table interpolation (when x_interp is None).
    x_interp : array-like or None
        Fixed locations for spline evaluation; None means table interpolation.
    extrapolate : bool
        If True, allow evaluation outside the point range.  # NOTE(review): behavior
        # of this flag is implemented elsewhere; confirm against the interp classes.
    num_cp : int or None
        Number of control points; used to generate points on [0, 1] when points is None.
    **kwargs : dict
        Additional options forwarded to the interpolation method class.
    """
    if not isinstance(method, str):
        msg = "Argument 'method' should be a string."
        raise ValueError(msg)
    elif method not in INTERP_METHODS:
        all_m = ', '.join(['"' + m + '"' for m in INTERP_METHODS])
        raise ValueError('Interpolation method "%s" is not defined. Valid methods are '
                         '%s.' % (method, all_m))
    elif method == 'akima1D':
        # Renamed methods still work, but warn the user about the new names.
        warn_deprecation("The 'akima1D' method has been renamed to '1D-akima'.")
    elif method == 'trilinear':
        warn_deprecation("The 'trilinear' method has been renamed to '3D-slinear'.")

    self.extrapolate = extrapolate

    # The table points are always defined, by specifying either the points directly,
    # or num_cp.
    if points is None:
        if num_cp is not None:
            # Generate evenly-spaced control points on [0, 1].
            points = [np.linspace(0.0, 1.0, num_cp)]
        else:
            msg = "Either 'points' or 'num_cp' must be specified."
            raise ValueError(msg)
    else:
        if isinstance(points, np.ndarray):
            # A single array means a single dimension.
            points = [points]

        for i, p in enumerate(points):
            n_p = len(p)
            if not np.all(np.diff(p) > 0.):
                raise ValueError("The points in dimension %d must be strictly "
                                 "ascending" % i)
            if not np.asarray(p).ndim == 1:
                raise ValueError("The points in dimension %d must be "
                                 "1-dimensional" % i)

    # Table Interpolation
    if x_interp is None:

        if values is None:
            msg = "Either 'values' or 'x_interp' must be specified."
            raise ValueError(msg)

        if method == 'bsplines':
            msg = "Method 'bsplines' is not supported for table interpolation."
            raise ValueError(msg)

        if not hasattr(values, 'ndim'):
            # allow reasonable duck-typed values
            values = np.asarray(values)

        if hasattr(values, 'dtype') and hasattr(values, 'astype'):
            # Convert integer (or other exact-typed) values to float.
            if not np.issubdtype(values.dtype, np.inexact):
                values = values.astype(float)

        if len(points) > values.ndim:
            raise ValueError("There are %d point arrays, but values has %d "
                             "dimensions" % (len(points), values.ndim))

        if (method.startswith('scipy') or method == 'akima') and \
                (np.iscomplexobj(values[:]) or np.any(np.iscomplex(points[0]))):
            msg = f"Interpolation method '{method}' does not support complex points or values."
            raise ValueError(msg)

        for i, p in enumerate(points):
            n_p = len(p)
            if values.shape[i] != n_p:
                raise ValueError("There are %d points and %d values in "
                                 "dimension %d" % (len(p), values.shape[i], i))

    self.grid = tuple([np.asarray(p) for p in points])
    self.values = values
    self.x_interp = x_interp

    # Lazily-populated evaluation state and derivative caches.
    self._xi = None
    self._d_dx = None
    self._d_dvalues = None
    self._compute_d_dvalues = False
    self._compute_d_dx = True

    # Cache spline coefficients.
    interp = INTERP_METHODS[method]

    if method.startswith('scipy'):
        # Scipy-backed methods need the method name forwarded as an option.
        kwargs['interp_method'] = method

    table = interp(self.grid, values, interp, **kwargs)
    table.check_config()
    self.table = table
    self._interp = interp
    self._interp_options = kwargs