def if_else(
    cond: Union[MX, SX, DM, float],
    if_true: Union[MX, SX, DM, float],
    if_false: Union[MX, SX, DM, float],
    b: int = 10000,
):
    """
    Smooth, differentiable approximation of a conditional.

    Blends the two branch values with a steep tanh of ``cond``; ``b``
    sets how sharp the transition is (larger = closer to a hard switch).
    """
    # Blend weight in (0, 1): approaches 1 as cond -> +inf, 0 as cond -> -inf.
    blend = 0.5 + 0.5 * tanh(b * cond)
    # NOTE(review): blend -> 1 for cond >> 0, which selects ``if_false``;
    # confirm this branch orientation is what callers expect.
    return if_true + (if_false - if_true) * blend
def mpc():
    """
    Solve a simple N-step model-predictive-control problem with CasADi's
    Opti stack and IPOPT, then print the planned states and control moves.

    The plant dynamics are hard-coded inline (copied from the
    plant_dynamics_damaged_simple function) because the solver expression
    graph cannot contain Python function calls.
    """
    opti = casadi.Opti()
    N = 10  # horizon length
    wu = 0.1  # weight of control effort (u)
    wx = 1  # weight of x's
    du_bounds = 0.15  # symmetric bound on each control move
    r = 5  # setpoint
    xi = 0  # initial (current) state; not a decision variable
    xs = []  # state decision variables
    dus = []  # control-move decision variables (ends up with N+1 entries, see below)
    for i in range(0, N):
        xs.append(opti.variable())
        dus.append(opti.variable())
        # Actuator effort constraints
        opti.subject_to(dus[-1] >= -du_bounds)
        opti.subject_to(dus[-1] <= du_bounds)
        # Dynamics constraint. Necessary to have a different first constraint since
        # the first x isn't a decision variable.
        # Dynamics are copied/pasted from the plant_dynamics_damaged_simple function
        # since the casadi solver cannot have function calls in it.
        if i == 0:
            opti.subject_to(
                xs[-1] - (1 * xi + 3 * dus[-1] + casadi.tanh(xi * 0.003) * 10 +
                          dus[-1] * 5 * casadi.sin(xi * 0.2)) == 0)
            # An extra control variable is appended on the first iteration, so
            # dus holds N+1 entries; for i >= 1 the dynamics use dus[-2].
            dus.append(opti.variable())
            opti.subject_to(dus[-1] >= -du_bounds)
            opti.subject_to(dus[-1] <= du_bounds)
        else:
            opti.subject_to(
                xs[-1] - (1 * xs[-2] + 3 * dus[-2] + casadi.tanh(xs[-2] * 0.003) * 10 +
                          dus[-2] * 5 * casadi.sin(xs[-2] * 0.2)) == 0)
    # Cost function for MPC taken from https://en.wikipedia.org/wiki/Model_predictive_control
    # (tracking error weighted by wx plus control effort weighted by wu).
    J = wx * (r - xi)**2 + wu * dus[0]**2
    for i in range(1, N):
        J += wx * (r - xs[i - 1])**2 + wu * dus[i]**2
    opti.minimize(J)
    opti.solver('ipopt')
    sol = opti.solve()
    # Print the trajectory: dus[i + 1] is the move applied at state xs[i].
    print("xs " + str(xi) + " dus: " + str(sol.value(dus[0])))
    for i in range(0, len(xs)):
        print("xs " + str(sol.value(xs[i])) + " dus: " + str(sol.value(dus[i + 1])))
def sigmoid(x,
            sigmoid_type: str = "tanh",
            normalization_range: Tuple[Union[float, int], Union[float, int]] = (0, 1)):
    """
    A sigmoid function. From Wikipedia (https://en.wikipedia.org/wiki/Sigmoid_function):
        A sigmoid function is a mathematical function having a characteristic
        "S"-shaped curve or sigmoid curve.

    Args:
        x: The input

        sigmoid_type: Type of sigmoid function to use [str]. Can be one of:
            * "tanh" or "logistic" (identical after normalization)
            * "arctan"
            * "polynomial"

        normalization_range: Range in which to normalize the sigmoid, shorthanded here in the
            documentation as "N". This parameter is given as a two-element tuple (min, max).

            After normalization:
            >>> sigmoid(-Inf) == normalization_range[0]
            >>> sigmoid(Inf) == normalization_range[1]

            * In the special case of N = (0, 1):
                >>> sigmoid(-Inf) == 0
                >>> sigmoid(Inf) == 1
                >>> sigmoid(0) == 0.5
                >>> d(sigmoid)/dx at x=0 == 0.5

            * In the special case of N = (-1, 1):
                >>> sigmoid(-Inf) == -1
                >>> sigmoid(Inf) == 1
                >>> sigmoid(0) == 0
                >>> d(sigmoid)/dx at x=0 == 1

    Returns: The value of the sigmoid.

    Raises:
        ValueError: if sigmoid_type is not one of the recognized names.
    """
    ### Sigmoid equations given here under the (-1, 1) normalization:
    # BUG FIX: the original condition was `sigmoid_type == ("tanh" or "logistic")`,
    # which evaluates to `sigmoid_type == "tanh"` (the `or` short-circuits on the
    # truthy "tanh"), so "logistic" incorrectly fell through to the ValueError.
    if sigmoid_type in ("tanh", "logistic"):
        # Note: tanh(x) is simply a scaled and shifted version of a logistic curve; after
        # normalization these functions are identical.
        s = cas.tanh(x)
    elif sigmoid_type == "arctan":
        s = 2 / cas.pi * cas.arctan(cas.pi / 2 * x)
    elif sigmoid_type == "polynomial":
        s = x / (1 + x**2)**0.5
    else:
        raise ValueError("Bad value of parameter 'sigmoid_type'!")

    ### Normalize (local names chosen to avoid shadowing the min/max builtins)
    lo = normalization_range[0]
    hi = normalization_range[1]
    s_normalized = s * (hi - lo) / 2 + (hi + lo) / 2

    return s_normalized
def Initialize(self):
    """
    Build the symbolic single-hidden-layer network model.

    Creates casadi MX symbols for the input, state and all weight/bias
    parameters, fills ``self.Parameters`` with uniform random initial
    values, and stores the resulting casadi Function on ``self.Function``.

    Returns
    -------
    None.
    """
    # Symbolic input and state vectors.
    u = cs.MX.sym('u', self.dim_u, 1)
    x = cs.MX.sym('x', self.dim_x, 1)

    # Symbolic parameters: hidden layer then output layer.
    W_h = cs.MX.sym('W_h', self.dim_hidden, self.dim_u + self.dim_x)
    b_h = cs.MX.sym('b_h', self.dim_hidden, 1)
    W_o = cs.MX.sym('W_out', self.dim_x, self.dim_hidden)
    b_o = cs.MX.sym('b_out', self.dim_x, 1)

    # Random initialization for every parameter, keyed by name.
    self.Parameters = {
        key: np.random.rand(sym.shape[0], sym.shape[1])
        for key, sym in (('W_h', W_h), ('b_h', b_h), ('W_o', W_o), ('b_o', b_o))
    }

    # Model equations: one tanh hidden layer followed by a linear output layer.
    h = cs.tanh(cs.mtimes(W_h, cs.vertcat(u, x)) + b_h)
    x_new = cs.mtimes(W_o, h) + b_o

    self.Function = cs.Function(
        self.name,
        [x, u, W_h, b_h, W_o, b_o],
        [x_new],
        ['x', 'u', 'W_h', 'b_h', 'W_o', 'b_o'],
        ['x_new'],
    )
    return None
def _convert(self, symbol, t, y, y_dot, inputs):
    """ See :meth:`CasadiConverter.convert()`. """
    # Leaf nodes: evaluate numerically and wrap the result in a casadi MX.
    if isinstance(
        symbol,
        (
            pybamm.Scalar,
            pybamm.Array,
            pybamm.Time,
            pybamm.InputParameter,
            pybamm.ExternalVariable,
        ),
    ):
        return casadi.MX(symbol.evaluate(t, y, y_dot, inputs))

    # State vectors are stitched together from the slices of `y`.
    elif isinstance(symbol, pybamm.StateVector):
        if y is None:
            raise ValueError("Must provide a 'y' for converting state vectors")
        return casadi.vertcat(*[y[y_slice] for y_slice in symbol.y_slices])

    # Same for time derivatives of the state, using `y_dot`.
    elif isinstance(symbol, pybamm.StateVectorDot):
        if y_dot is None:
            raise ValueError("Must provide a 'y_dot' for converting state vectors")
        return casadi.vertcat(*[y_dot[y_slice] for y_slice in symbol.y_slices])

    elif isinstance(symbol, pybamm.BinaryOperator):
        left, right = symbol.children
        # process children
        converted_left = self.convert(left, t, y, y_dot, inputs)
        converted_right = self.convert(right, t, y, y_dot, inputs)
        # min/max need casadi's elementwise fmin/fmax rather than the generic rule.
        if isinstance(symbol, pybamm.Minimum):
            return casadi.fmin(converted_left, converted_right)
        if isinstance(symbol, pybamm.Maximum):
            return casadi.fmax(converted_left, converted_right)
        # _binary_evaluate defined in derived classes for specific rules
        return symbol._binary_evaluate(converted_left, converted_right)

    elif isinstance(symbol, pybamm.UnaryOperator):
        converted_child = self.convert(symbol.child, t, y, y_dot, inputs)
        if isinstance(symbol, pybamm.AbsoluteValue):
            return casadi.fabs(converted_child)
        return symbol._unary_evaluate(converted_child)

    elif isinstance(symbol, pybamm.Function):
        converted_children = [
            self.convert(child, t, y, y_dot, inputs) for child in symbol.children
        ]
        # Special functions: map each known numpy ufunc to its casadi equivalent.
        if symbol.function == np.min:
            return casadi.mmin(*converted_children)
        elif symbol.function == np.max:
            return casadi.mmax(*converted_children)
        elif symbol.function == np.abs:
            return casadi.fabs(*converted_children)
        elif symbol.function == np.sqrt:
            return casadi.sqrt(*converted_children)
        elif symbol.function == np.sin:
            return casadi.sin(*converted_children)
        elif symbol.function == np.arcsinh:
            return casadi.arcsinh(*converted_children)
        elif symbol.function == np.arccosh:
            return casadi.arccosh(*converted_children)
        elif symbol.function == np.tanh:
            return casadi.tanh(*converted_children)
        elif symbol.function == np.cosh:
            return casadi.cosh(*converted_children)
        elif symbol.function == np.sinh:
            return casadi.sinh(*converted_children)
        elif symbol.function == np.cos:
            return casadi.cos(*converted_children)
        elif symbol.function == np.exp:
            return casadi.exp(*converted_children)
        elif symbol.function == np.log:
            return casadi.log(*converted_children)
        elif symbol.function == np.sign:
            return casadi.sign(*converted_children)
        # scipy interpolators are re-expressed as a casadi bspline lookup table.
        elif isinstance(symbol.function, (PchipInterpolator, CubicSpline)):
            return casadi.interpolant("LUT", "bspline", [symbol.x], symbol.y)(
                *converted_children
            )
        # Autograd-style elementwise gradients: the trailing digit of the function
        # name encodes which argument is being differentiated.
        elif symbol.function.__name__.startswith("elementwise_grad_of_"):
            differentiating_child_idx = int(symbol.function.__name__[-1])
            # Create dummy symbolic variables in order to differentiate using CasADi
            dummy_vars = [
                casadi.MX.sym("y_" + str(i)) for i in range(len(converted_children))
            ]
            func_diff = casadi.gradient(
                symbol.differentiated_function(*dummy_vars),
                dummy_vars[differentiating_child_idx],
            )
            # Create function and evaluate it using the children
            casadi_func_diff = casadi.Function("func_diff", dummy_vars, [func_diff])
            return casadi_func_diff(*converted_children)
        # Other functions
        else:
            return symbol._function_evaluate(converted_children)

    elif isinstance(symbol, pybamm.Concatenation):
        converted_children = [
            self.convert(child, t, y, y_dot, inputs) for child in symbol.children
        ]
        if isinstance(symbol, (pybamm.NumpyConcatenation, pybamm.SparseStack)):
            return casadi.vertcat(*converted_children)
        # DomainConcatenation specifies a particular ordering for the concatenation,
        # which we must follow
        elif isinstance(symbol, pybamm.DomainConcatenation):
            slice_starts = []
            all_child_vectors = []
            for i in range(symbol.secondary_dimensions_npts):
                child_vectors = []
                # NOTE(review): slice_starts is never reset inside this loop, so for
                # i > 0 the zip() below pairs the fresh child_vectors with the starts
                # recorded at i == 0 — verify this is the intended ordering.
                for child_var, slices in zip(
                    converted_children, symbol._children_slices
                ):
                    for child_dom, child_slice in slices.items():
                        slice_starts.append(symbol._slices[child_dom][i].start)
                        child_vectors.append(
                            child_var[child_slice[i].start : child_slice[i].stop]
                        )
                all_child_vectors.extend(
                    [v for _, v in sorted(zip(slice_starts, child_vectors))]
                )
            return casadi.vertcat(*all_child_vectors)

    else:
        raise TypeError(
            """
            Cannot convert symbol of type '{}' to CasADi.
            Symbols must all be 'linear algebra' at this stage.
            """.format(
                type(symbol)
            )
        )
def traverse(node, casadi_syms, rootnode):
    """
    Recursively translate a sympy expression tree into the equivalent
    casadi expression, looking up sympy symbols in ``casadi_syms``.

    Raises an Exception for any unhandled node type, except the root node
    itself (which silently yields None).
    """
    trig = sympy.functions.elementary.trigonometric
    nargs = len(node.args)

    # Leaves: symbols and numeric constants.
    if nargs == 0:
        if node.is_Symbol:
            return casadi_syms[node.name]
        if node.is_Zero:
            return 0
        if node.is_Number:
            return float(node)

    # Unary operators: dispatch on the exact sympy node type.
    if nargs == 1:
        child = traverse(node.args[0], casadi_syms, rootnode)  # Recursion!
        unary_map = {
            trig.cos: casadi.cos,
            trig.sin: casadi.sin,
            trig.tan: casadi.tan,
            trig.cosh: casadi.cosh,
            trig.sinh: casadi.sinh,
            trig.tanh: casadi.tanh,
            trig.acos: casadi.arccos,
            trig.asin: casadi.arcsin,
            trig.atan: casadi.arctan,
        }
        fn = unary_map.get(type(node))
        if fn is not None:
            return fn(child)
        if type(node) == trig.cot:
            return 1 / casadi.tan(child)

    # Binary operators.
    if nargs == 2:
        left = traverse(node.args[0], casadi_syms, rootnode)  # Recursion!
        right = traverse(node.args[1], casadi_syms, rootnode)  # Recursion!
        if node.is_Pow:
            return left**right
        if type(node) == trig.atan2:
            return casadi.arctan2(left, right)

    # N-ary operators (sums and products of any arity, including 2).
    if nargs >= 2:
        children = (traverse(arg, casadi_syms, rootnode) for arg in node.args)
        if node.is_Add:
            return reduce(lambda acc, item: acc + item, children)
        if node.is_Mul:
            return reduce(lambda acc, item: acc * item, children)

    if node != rootnode:
        raise Exception("No mapping to casadi for node of type " + str(type(node)))
# Problem dimensions taken from the symbolic model.
nx = model.x.size()[0]
nu = model.u.size()[0]
ny = nx + nu
ny_e = nx
N = 40  # horizon length (number of shooting intervals)

# set dimensions
ocp.dims.N = N

# set cost: both stage and terminal cost are supplied as external expressions
ocp.cost.cost_type = 'EXTERNAL'
ocp.cost.cost_type_e = 'EXTERNAL'

W_u = 1e-3  # control-effort weight in the external cost
theta = model.x[1]
# Stage cost: tanh(theta)^2 + 0.5*(x0^2 + W_u*u^2); the terminal cost drops the u term.
ocp.model.cost_expr_ext_cost = tanh(theta)**2 + .5 * (model.x[0]**2 + W_u * model.u**2)
ocp.model.cost_expr_ext_cost_e = tanh(theta)**2 + .5 * model.x[0]**2

custom_hess_u = W_u

J = horzcat(SX.eye(2), SX(2, 2))
print(DM(J.sparsity()))

# diagonal matrix with second order terms of outer loss function.
D = SX.sym('D', Sparsity.diag(2))
D[0, 0] = 1
[hess_tan, grad_tan] = hessian(tanh(theta)**2, theta)
# At theta == 0 the Gauss-Newton term grad/theta is 0/0, so the exact Hessian
# is substituted there via casadi's if_else.
D[1, 1] = if_else(theta == 0, hess_tan, grad_tan / theta)
def act_tanh(u):
    """Hyperbolic-tangent activation: maps ``u`` elementwise into (-1, 1)."""
    activation = cas.tanh(u)
    return activation
def Initialize(self):
    """
    Defines the parameters of the model as symbolic casadi variables and
    the model equation as casadi function (a GRU-style recurrent cell
    followed by a one-hidden-layer MLP readout). Model parameters are
    initialized randomly.

    Returns
    -------
    None.
    """
    dim_u = self.dim_u
    dim_c = self.dim_c
    dim_hidden = self.dim_hidden
    dim_out = self.dim_out
    name = self.name

    u = cs.MX.sym('u', dim_u, 1)
    c = cs.MX.sym('c', dim_c, 1)

    # Parameters
    # RNN part (reset gate r, update gate z, candidate cell c)
    W_r = cs.MX.sym('W_r', dim_c, dim_u + dim_c)
    b_r = cs.MX.sym('b_r', dim_c, 1)
    W_z = cs.MX.sym('W_z', dim_c, dim_u + dim_c)
    b_z = cs.MX.sym('b_z', dim_c, 1)
    W_c = cs.MX.sym('W_c', dim_c, dim_u + dim_c)
    b_c = cs.MX.sym('b_c', dim_c, 1)

    # MLP part
    # BUG FIX: these four symbols were previously created with the copy-pasted
    # names 'W_z'/'b_z'/'W_c'/'b_c', duplicating the RNN symbols' display names;
    # they now carry their own names, matching input_names below.
    W_h = cs.MX.sym('W_h', dim_hidden, dim_c)
    b_h = cs.MX.sym('b_h', dim_hidden, 1)
    W_o = cs.MX.sym('W_o', dim_out, dim_hidden)
    b_o = cs.MX.sym('b_o', dim_out, 1)

    # Put all Parameters in Dictionary with random initialization
    self.Parameters = {'W_r': np.random.rand(W_r.shape[0], W_r.shape[1]),
                       'b_r': np.random.rand(b_r.shape[0], b_r.shape[1]),
                       'W_z': np.random.rand(W_z.shape[0], W_z.shape[1]),
                       'b_z': np.random.rand(b_z.shape[0], b_z.shape[1]),
                       'W_c': np.random.rand(W_c.shape[0], W_c.shape[1]),
                       'b_c': np.random.rand(b_c.shape[0], b_c.shape[1]),
                       'W_h': np.random.rand(W_h.shape[0], W_h.shape[1]),
                       'b_h': np.random.rand(b_h.shape[0], b_h.shape[1]),
                       'W_o': np.random.rand(W_o.shape[0], W_o.shape[1]),
                       'b_o': np.random.rand(b_o.shape[0], b_o.shape[1])}

    # Equations: GRU cell update followed by the MLP readout.
    f_r = logistic(cs.mtimes(W_r, cs.vertcat(u, c)) + b_r)   # reset gate
    f_z = logistic(cs.mtimes(W_z, cs.vertcat(u, c)) + b_z)   # update gate
    c_r = f_r * c                                            # gated previous cell
    f_c = cs.tanh(cs.mtimes(W_c, cs.vertcat(u, c_r)) + b_c)  # candidate cell
    c_new = f_z * c + (1 - f_z) * f_c                        # blended new cell
    h = cs.tanh(cs.mtimes(W_h, c_new) + b_h)                 # MLP hidden layer
    x_new = cs.mtimes(W_o, h) + b_o                          # linear output

    # Casadi Function
    inputs = [c, u, W_r, b_r, W_z, b_z, W_c, b_c, W_h, b_h, W_o, b_o]
    input_names = ['c', 'u', 'W_r', 'b_r', 'W_z', 'b_z', 'W_c', 'b_c',
                   'W_h', 'b_h', 'W_o', 'b_o']
    outputs = [c_new, x_new]
    output_names = ['c_new', 'x_new']

    self.Function = cs.Function(name, inputs, outputs, input_names, output_names)

    return None
def logistic(x):
    """Logistic sigmoid, written via the identity 1/(1+e^-x) == 0.5 + 0.5*tanh(x/2)."""
    return 0.5 + 0.5 * cs.tanh(0.5 * x)