def _set_partials_meta(self):
    """
    Set subjacobian info into our jacobian.
    """
    with self.jacobian_context() as J:
        outputs = self._var_abs_names['output']
        inputs = self._var_abs_names['input']

        for wrt_name, wrt_vars in (('output', outputs), ('input', inputs)):
            for abs_key in product(outputs, wrt_vars):
                if abs_key in self._subjacs_info:
                    meta = self._subjacs_info[abs_key]
                else:
                    meta = SUBJAC_META_DEFAULTS.copy()

                dependent = meta['dependent']
                if not dependent:
                    continue

                if meta['value'] is None:
                    out_size = self._var_abs2meta['output'][abs_key[0]]['size']
                    in_size = self._var_abs2meta[wrt_name][abs_key[1]]['size']
                    meta['value'] = np.zeros((out_size, in_size))

                J._set_partials_meta(abs_key, meta, wrt_name == 'input')

                method = meta.get('method', False)
                if method:
                    self._approx_schemes[method].add_approximation(abs_key, meta)

    for approx in itervalues(self._approx_schemes):
        approx._init_approximations()
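
The zero-fill step above can be illustrated in isolation. The following is a minimal standalone sketch, assuming stand-in values for SUBJAC_META_DEFAULTS and for the variable sizes that _var_abs2meta would normally supply; it is not the framework's actual metadata.

import numpy as np

# Hypothetical stand-ins for the framework's metadata.
SUBJAC_META_DEFAULTS = {'rows': None, 'cols': None, 'value': None, 'dependent': True}
out_size, in_size = 3, 2  # sizes that _var_abs2meta would normally provide

meta = SUBJAC_META_DEFAULTS.copy()
if meta['value'] is None:
    # A dense (out_size x in_size) block of zeros is allocated when a dependent
    # (of, wrt) pair was declared without an explicit value.
    meta['value'] = np.zeros((out_size, in_size))

print(meta['value'].shape)  # (3, 2)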
def _approx_partials(self, of, wrt, method='fd', **kwargs):
    """
    Inform the framework that the specified derivatives are to be approximated.

    Parameters
    ----------
    of : str or list of str
        The name of the residual(s) that derivatives are being computed for.
        May also contain a glob pattern.
    wrt : str or list of str
        The name of the variables that derivatives are taken with respect to.
        This can contain the name of any input or output variable.
        May also contain a glob pattern.
    method : str
        The type of approximation that should be used. Valid options include:
        - 'fd': Finite Difference
    **kwargs : dict
        Keyword arguments for controlling the behavior of the approximation.
    """
    pattern_matches = self._find_partial_matches(of, wrt)

    for of_bundle, wrt_bundle in product(*pattern_matches):
        of_pattern, of_matches = of_bundle
        wrt_pattern, wrt_matches = wrt_bundle
        if not of_matches:
            raise ValueError('No matches were found for of="{}"'.format(of_pattern))
        if not wrt_matches:
            raise ValueError('No matches were found for wrt="{}"'.format(wrt_pattern))

        info = self._subjacs_info
        for rel_key in product(of_matches, wrt_matches):
            abs_key = rel_key2abs_key(self, rel_key)
            if abs_key in info:
                meta = info[abs_key]
            else:
                meta = SUBJAC_META_DEFAULTS.copy()
            meta['method'] = method
            meta.update(kwargs)
            info[abs_key] = meta
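
A rough standalone sketch of the pattern expansion this method relies on, using fnmatch as a stand-in for _find_partial_matches and made-up variable names; the 'step' keyword below is only illustrative of the kind of option passed through **kwargs.

import fnmatch
from itertools import product

# Hypothetical stand-in for _find_partial_matches: expand glob patterns
# against a list of known variable names.
def find_matches(patterns, names):
    if isinstance(patterns, str):
        patterns = [patterns]
    return [(pat, fnmatch.filter(names, pat)) for pat in patterns]

outputs = ['y1', 'y2']
inputs = ['x', 'z']

subjacs_info = {}
for (of_pat, of_matches), (wrt_pat, wrt_matches) in product(
        find_matches('y*', outputs), find_matches(['x', 'z'], inputs)):
    for of_name, wrt_name in product(of_matches, wrt_matches):
        # Mirror what _approx_partials stores: tag each matched pair with the
        # approximation method plus any keyword options.
        meta = subjacs_info.setdefault((of_name, wrt_name), {})
        meta['method'] = 'fd'
        meta['step'] = 1e-6  # illustrative **kwargs option

print(sorted(subjacs_info))
# [('y1', 'x'), ('y1', 'z'), ('y2', 'x'), ('y2', 'z')]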
def _declare_partials(self, of, wrt, dependent=True, rows=None, cols=None, val=None):
    """
    Store subjacobian metadata for later use.

    Parameters
    ----------
    of : str or list of str
        The name of the residual(s) that derivatives are being computed for.
        May also contain a glob pattern.
    wrt : str or list of str
        The name of the variables that derivatives are taken with respect to.
        This can contain the name of any input or output variable.
        May also contain a glob pattern.
    dependent : bool(True)
        If False, specifies no dependence between the output(s) and the input(s). This is only
        necessary in the case of a sparse global jacobian, because if 'dependent=False' is not
        specified and declare_partials is not called for a given pair, then a dense matrix of
        zeros will be allocated in the sparse global jacobian for that pair. In the case of a
        dense global jacobian it doesn't matter because the space for a dense subjac will always
        be allocated for every pair.
    rows : ndarray of int or None
        Row indices for each nonzero entry. For sparse subjacobians only.
    cols : ndarray of int or None
        Column indices for each nonzero entry. For sparse subjacobians only.
    val : float or ndarray of float or scipy.sparse
        Value of subjacobian. If rows and cols are not None, this will contain the values found
        at each (row, col) location in the subjac.
    """
    if dependent and val is not None and not issparse(val):
        val = np.atleast_1d(val)
        # np.promote_types will choose the smallest dtype that can contain both arguments
        safe_dtype = np.promote_types(val.dtype, float)
        val = val.astype(safe_dtype, copy=False)

    if dependent and rows is not None:
        rows = np.array(rows, dtype=int, copy=False)
        cols = np.array(cols, dtype=int, copy=False)

        if rows.shape != cols.shape:
            raise ValueError('rows and cols must have the same shape,'
                             ' rows: {}, cols: {}'.format(rows.shape, cols.shape))

        if val is not None and val.shape != (1,) and rows.shape != val.shape:
            raise ValueError('If rows and cols are specified, val must be a scalar or have the '
                             'same shape, val: {}, rows/cols: {}'.format(val.shape, rows.shape))

        if val is None:
            val = np.zeros_like(rows, dtype=float)

    pattern_matches = self._find_partial_matches(of, wrt)

    multiple_items = False

    for of_bundle, wrt_bundle in product(*pattern_matches):
        of_pattern, of_matches = of_bundle
        wrt_pattern, wrt_matches = wrt_bundle
        if not of_matches:
            raise ValueError('No matches were found for of="{}"'.format(of_pattern))
        if not wrt_matches:
            raise ValueError('No matches were found for wrt="{}"'.format(wrt_pattern))

        make_copies = (multiple_items
                       or len(of_matches) > 1
                       or len(wrt_matches) > 1)
        # Setting this to true means that future loop iterations (i.e. if there are multiple
        # items in either of or wrt) will make copies.
        multiple_items = True

        for rel_key in product(of_matches, wrt_matches):
            abs_key = rel_key2abs_key(self, rel_key)
            if not dependent:
                if abs_key in self._subjacs_info:
                    del self._subjacs_info[abs_key]
                continue

            if abs_key in self._subjacs_info:
                meta = self._subjacs_info[abs_key]
            else:
                meta = SUBJAC_META_DEFAULTS.copy()
            meta['rows'] = rows
            meta['cols'] = cols
            meta['value'] = deepcopy(val) if make_copies else val
            meta['dependent'] = dependent
            self._check_partials_meta(abs_key, meta)
            self._subjacs_info[abs_key] = meta
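
The rows/cols validation above, sketched in isolation under assumed inputs (three declared nonzeros and no explicit value):

import numpy as np

# Assumed sparse-list declaration.
rows = np.array([0, 1, 2], dtype=int)
cols = np.array([0, 0, 1], dtype=int)
val = None

if rows.shape != cols.shape:
    raise ValueError('rows and cols must have the same shape,'
                     ' rows: {}, cols: {}'.format(rows.shape, cols.shape))

if val is None:
    # With no value given, one zero is allocated per declared nonzero.
    val = np.zeros_like(rows, dtype=float)

print(val)  # [0. 0. 0.]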
def _declare_partials(self, of, wrt, dependent=True, rows=None, cols=None, val=None):
    """
    Store subjacobian metadata for later use.

    Parameters
    ----------
    of : str or list of str
        The name of the residual(s) that derivatives are being computed for.
        May also contain a glob pattern.
    wrt : str or list of str
        The name of the variables that derivatives are taken with respect to.
        This can contain the name of any input or output variable.
        May also contain a glob pattern.
    dependent : bool(True)
        If False, specifies no dependence between the output(s) and the input(s). This is only
        necessary in the case of a sparse global jacobian, because if 'dependent=False' is not
        specified and declare_partials is not called for a given pair, then a dense matrix of
        zeros will be allocated in the sparse global jacobian for that pair. In the case of a
        dense global jacobian it doesn't matter because the space for a dense subjac will always
        be allocated for every pair.
    rows : ndarray of int or None
        Row indices for each nonzero entry. For sparse subjacobians only.
    cols : ndarray of int or None
        Column indices for each nonzero entry. For sparse subjacobians only.
    val : float or ndarray of float or scipy.sparse
        Value of subjacobian. If rows and cols are not None, this will contain the values found
        at each (row, col) location in the subjac.
    """
    is_scalar = isscalar(val)

    if dependent:
        if rows is None:
            if val is not None and not is_scalar and not issparse(val):
                val = atleast_2d(val)
                val = val.astype(promote_types(val.dtype, float), copy=False)
            rows_max = cols_max = 0
        else:  # sparse list format
            rows = np.array(rows, dtype=INT_DTYPE, copy=False)
            cols = np.array(cols, dtype=INT_DTYPE, copy=False)

            if rows.shape != cols.shape:
                raise ValueError('rows and cols must have the same shape,'
                                 ' rows: {}, cols: {}'.format(rows.shape, cols.shape))

            if is_scalar:
                val = np.full(rows.size, val, dtype=float)
                is_scalar = False
            elif val is not None:
                # np.promote_types will choose the smallest dtype that can contain
                # both arguments
                val = atleast_1d(val)
                safe_dtype = promote_types(val.dtype, float)
                val = val.astype(safe_dtype, copy=False)

                if rows.shape != val.shape:
                    raise ValueError('If rows and cols are specified, val must be a scalar or '
                                     'have the same shape, val: {}, '
                                     'rows/cols: {}'.format(val.shape, rows.shape))
            else:
                val = np.zeros_like(rows, dtype=float)

            if rows.size > 0:
                if rows.min() < 0:
                    msg = '{}: d({})/d({}): row indices must be non-negative'
                    raise ValueError(msg.format(self.pathname, of, wrt))
                if cols.min() < 0:
                    msg = '{}: d({})/d({}): col indices must be non-negative'
                    raise ValueError(msg.format(self.pathname, of, wrt))
                rows_max = rows.max()
                cols_max = cols.max()
            else:
                rows_max = cols_max = 0

    pattern_matches = self._find_partial_matches(of, wrt)
    abs2meta = self._var_abs2meta

    is_array = isinstance(val, ndarray)

    for of_bundle, wrt_bundle in product(*pattern_matches):
        of_pattern, of_matches = of_bundle
        wrt_pattern, wrt_matches = wrt_bundle
        if not of_matches:
            raise ValueError('No matches were found for of="{}"'.format(of_pattern))
        if not wrt_matches:
            raise ValueError('No matches were found for wrt="{}"'.format(wrt_pattern))

        for rel_key in product(of_matches, wrt_matches):
            abs_key = rel_key2abs_key(self, rel_key)
            if not dependent:
                if abs_key in self._subjacs_info:
                    del self._subjacs_info[abs_key]
                continue

            if abs_key in self._subjacs_info:
                meta = self._subjacs_info[abs_key]
            else:
                meta = SUBJAC_META_DEFAULTS.copy()

            meta['rows'] = rows
            meta['cols'] = cols
            meta['dependent'] = dependent
            meta['shape'] = shape = (abs2meta[abs_key[0]]['size'],
                                     abs2meta[abs_key[1]]['size'])

            if val is None:
                # we can only get here if rows is None (we're not sparse list format)
                meta['value'] = np.zeros(shape)
            elif is_array:
                if rows is None and val.shape != shape and val.size == shape[0] * shape[1]:
                    meta['value'] = val = val.copy().reshape(shape)
                else:
                    meta['value'] = val.copy()
            elif is_scalar:
                meta['value'] = np.full(shape, val, dtype=float)
            else:
                meta['value'] = val

            if rows_max >= shape[0] or cols_max >= shape[1]:
                of, wrt = abs_key2rel_key(self, abs_key)
                msg = '{}: d({})/d({}): Expected {}x{} but declared at least {}x{}'
                raise ValueError(msg.format(self.pathname, of, wrt, shape[0], shape[1],
                                            rows_max + 1, cols_max + 1))

            self._check_partials_meta(abs_key, meta['value'],
                                      shape if rows is None else (rows.shape[0], 1))

            self._subjacs_info[abs_key] = meta
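
A standalone sketch of the shape handling in this version, with assumed sizes: a dense value of the right size is reshaped to the subjac shape, and sparse-list indices are checked against that shape.

import numpy as np

# Assumed subjac shape: (size of the 'of' variable, size of the 'wrt' variable).
shape = (2, 3)
val = np.arange(6.0)  # flat dense value with the right number of entries

# A dense value whose size matches is reshaped to the subjac shape.
if val.shape != shape and val.size == shape[0] * shape[1]:
    val = val.copy().reshape(shape)
print(val.shape)  # (2, 3)

# Sparse-list indices must stay inside the declared shape.
rows = np.array([0, 1, 1])
cols = np.array([0, 2, 3])  # 3 is out of range for a width-3 subjac
rows_max, cols_max = rows.max(), cols.max()
if rows_max >= shape[0] or cols_max >= shape[1]:
    print('Expected {}x{} but declared at least {}x{}'.format(
        shape[0], shape[1], rows_max + 1, cols_max + 1))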