def taylor_series_expansion(expr, diff_mode=differentiate.Modes.reverse_numeric, order=1):
    """
    Generate a taylor series approximation for expr.

    Parameters
    ----------
    expr: pyomo.core.expr.numeric_expr.ExpressionBase
    diff_mode: pyomo.core.expr.calculus.derivatives.Modes
        The method for differentiation.
    order: int
        The order of the taylor series expansion
        If order is not 1, then symbolic differentiation must
        be used (differentiation.Modes.reverse_symbolic or
        differentiation.Modes.sympy).

    Returns
    -------
    res: pyomo.core.expr.numeric_expr.ExpressionBase

    Raises
    ------
    ValueError
        If order is negative.
    """
    if order < 0:
        raise ValueError(
            'Cannot compute taylor series expansion of order {0}'.format(
                str(order)))
    # Numeric reverse-mode differentiation only produces first derivatives,
    # so higher orders require a symbolic mode.  Only warn and switch when
    # differentiation of order > 1 will actually happen; order == 0 needs no
    # derivatives at all, so it should not trigger the warning.
    if order > 1 and diff_mode is differentiate.Modes.reverse_numeric:
        logger.warning(
            'taylor_series_expansion can only use symbolic differentiation for orders larger than 1'
        )
        diff_mode = differentiate.Modes.reverse_symbolic

    e_vars = list(identify_variables(expr=expr, include_fixed=False))

    # Zeroth-order term: the expression evaluated at the current point.
    res = value(expr)

    # First-order term: gradient dotted with (x - x0).
    if order >= 1:
        derivs = differentiate(expr=expr, wrt_list=e_vars, mode=diff_mode)
        res += sum(
            value(derivs[i]) * (e_vars[i] - e_vars[i].value)
            for i in range(len(e_vars)))

    # Higher-order terms.  The recursive helper _loop modifies derivs in
    # place so that derivs becomes a list of lists of lists...  However,
    # _loop is also a generator so that we don't have to loop through it
    # twice.  _loop yields two lists: the first is a list of indices
    # corresponding to the first k-1 variables that differentiation is
    # being done with respect to; the second is a list of derivatives,
    # where each entry is the derivative with respect to the first k-1
    # variables and the kth variable, whose index matches the index in
    # _derivs.
    if order >= 2:
        for n in range(2, order + 1):
            coef = 1.0 / math.factorial(n)
            for ndx_list, _derivs in _loop(derivs, e_vars, diff_mode, list()):
                tmp = coef
                for ndx in ndx_list:
                    tmp *= (e_vars[ndx] - e_vars[ndx].value)
                res += tmp * sum(
                    value(_derivs[i]) * (e_vars[i] - e_vars[i].value)
                    for i in range(len(e_vars)))
    return res
def check_convexity_conditions(self, relax=False):
    """Returns True if all convexity conditions for the conic
    constraint are satisfied. If relax is True, then variable
    domains are ignored and it is assumed that all variables
    are continuous."""
    # Domain check: unless relaxed, every participating variable
    # must be continuous.
    if not relax:
        if not self.x1.is_continuous():
            return False
        if not self.x2.is_continuous():
            return False
        if not self.r.is_continuous():
            return False
    # x2 must be bounded above by a nonpositive value.
    if not (self.x2.has_ub() and value(self.x2.ub) <= 0):
        return False
    # r must be bounded below by a nonnegative value.
    return self.r.has_lb() and value(self.r.lb) >= 0
def check_convexity_conditions(self, relax=False):
    """Returns True if all convexity conditions for the conic
    constraint are satisfied. If relax is True, then variable
    domains are ignored and it is assumed that all variables
    are continuous."""
    # alpha may be a parameter; evaluate it up front (no exception so
    # an unevaluable alpha simply yields None and fails the final check).
    alpha = value(self.alpha, exception=False)
    # Domain check: unless relaxed, every participating variable
    # must be continuous.
    if not relax:
        all_continuous = (self.r1.is_continuous()
                          and self.r2.is_continuous()
                          and all(xi.is_continuous() for xi in self.x))
        if not all_continuous:
            return False
    # r1 and r2 must each be bounded below by a nonnegative value.
    if not (self.r1.has_lb() and value(self.r1.lb) >= 0):
        return False
    if not (self.r2.has_lb() and value(self.r2.lb) >= 0):
        return False
    # alpha must evaluate to a number strictly between 0 and 1.
    return (alpha is not None) and (0 < alpha < 1)
def __call__(self, exception=True):
    """Evaluate the constraint body at the current variable values.

    Returns None instead of raising when exception is False and a
    term cannot be evaluated."""
    try:
        body_args = self._body_function_variables(values=True)
        # we wrap the result with value(...) as the alpha term used by
        # some of the constraints may be a parameter
        return value(self._body_function(*body_args))
    except (ValueError, TypeError):
        if not exception:
            return None
        raise ValueError("one or more terms "
                         "could not be evaluated")
def __call__(self, exception=True):
    """Evaluate the constraint body at the current variable values.

    Returns None instead of raising when exception is False and a
    term cannot be evaluated."""
    try:
        values = self._body_function_variables(values=True)
        result = self._body_function(*values)
        # the alpha term used by some of the constraints may be a
        # parameter, so we wrap the result with value(...)
        return value(result)
    except (ValueError, TypeError):
        if exception:
            raise ValueError("one or more terms "
                             "could not be evaluated")
        return None