def __init__(self, val, der=1):
    """
    Construct a Vector variable with a user defined value and derivative.

    INPUTS
    =======
    val: list of floats, compulsory
        Value of the Vector variable
    der: float, optional, default value is 1
        Derivative of the Vector variable/function of a variable

    RETURNS
    ========
    Vector class instance

    NOTES
    =====
    PRE:
        - val and der have numeric type and val must be a list
        - two or fewer inputs
    POST:
        returns a Vector class instance with value = val and
        derivative = der
    """
    self._val = np.array(val)
    # Seed the jacobian as der along the diagonal (identity scaled by der).
    self._jacobian = der * np.eye(len(val))
    # One Scalar per component; seed every scalar's jacobian against the
    # full component set so cross-partials are defined consistently.
    components = [Scalar(v) for v in val]
    for component in components:
        component.init_jacobian(components)
    self._scalars = components
def arccosh(x):
    """
    Returns a constant, Scalar, or Vector object that is the arccosh of the
    user specified value.

    INPUTS
    =======
    x: real valued numeric type, Scalar object, or Vector object

    RETURNS
    =======
    Constant, Scalar, or Vector class instance

    NOTES
    ======
    If the input value is a constant, each operator method returns a constant
    with the operation applied. If the input value is a Scalar object, the
    operator method applies the operator to the value and propagates the
    derivative through the chain rule, wrapping the results in a new Scalar
    object. If the input value is a vector, the operator method updates the
    value of the element, and the jacobian of the vector, returning a new
    vector object with these properties.

    d/dx arccosh(x) = 1 / sqrt(x^2 - 1), valid for x > 1.
    (The previous factor -arccosh(x)*tanh(x) was the derivative of sech(x),
    a copy-paste error.)
    """
    try:
        j = x._jacobian
    except AttributeError:
        return np.arccosh(x)  # If x is a constant
    else:
        try:
            k = j.keys()  # To tell whether x is a scalar or vector
        except AttributeError:
            # x is a vector variable
            new = Vector(np.arccosh(x._val), x._jacobian)
            # Chain-rule factor for arccosh, reused in every branch below.
            d_arccosh = 1 / np.sqrt(x._val ** 2 - 1)
            try:
                # If x is a complex vector variable, update a copy of the
                # original dictionary so the source vector is untouched.
                dict_self = x._dict.copy()
                for key in dict_self.keys():
                    dict_self[key] = dict_self[key] * d_arccosh
                new._dict = dict_self
                return new
            except AttributeError:
                # x is not a complex vector variable: attach a fresh
                # derivative dictionary to the new variable.
                derivative = Counter()
                derivative[x] = x._jacobian * d_arccosh
                new._dict = derivative
                return new
        else:
            # x is a Scalar: propagate each partial through the chain rule.
            jacobian = {
                k: x.partial(k) / np.sqrt(x._val ** 2 - 1)
                for k in x._jacobian.keys()
            }
            return Scalar(np.arccosh(x._val), jacobian)
def create_scalar(vals):
    """
    Return Scalar object(s) with user defined value(s).

    INPUTS
    =======
    vals: a list of numeric types or a single numeric type value

    RETURNS
    =======
    scalar: if vals is a single numeric type, scalar, a Scalar instance
        with user defined value, is returned as a single object
    scalars: if vals is a list of numeric types, scalars, a list of Scalar
        objects with values corresponding to vals, is returned as a list

    NOTES
    ======
    This method initializes all Scalar objects desired by the user with user
    defined value. Before returning these Scalar objects, this method
    solidifies the variable 'universe' by seeding the jacobians of each of
    the Scalar objects with appropriate values with respect to all Scalars
    requested by the user.
    """
    try:
        # EAFP: treat vals as a sized collection of values.
        scalars = [Scalar(v) for v in vals]
        # Seed every scalar's jacobian against the whole universe.
        for s in scalars:
            s.init_jacobian(scalars)
        return scalars
    except TypeError:
        # vals is a single numeric value: a universe of one.
        lone = Scalar(vals)
        lone.init_jacobian([lone])
        return lone
from Dotua.nodes.scalar import Scalar
import numpy as np

'''
Initialize local variables for testing.  Since these tests need to be
independent of AutoDiff, we will simulate the initialization process by
calling init_jacobian once the entire 'universe' of variables has been
defined
'''

# Define scalar objects: two variables x=1, y=2 forming the full
# variable universe for the tests below.
vars = x, y = Scalar(1), Scalar(2)
# Cache their raw numeric values for expected-value computations.
a, b = x.eval(), y.eval()
# Seed each scalar's jacobian against the complete universe so that
# partial derivatives with respect to every variable are defined.
for var in vars:
    var.init_jacobian(vars)

# Define functions of the scalar objects.
# Commutative pairs (f_1/f_2, f_5/f_6) and order-swapped pairs
# (f_3/f_4, f_7/f_8) exercise both operand orders of each operator.
f_1 = x + y
f_2 = y + x
f_3 = x - y
f_4 = y - x
f_5 = x * y
f_6 = y * x
f_7 = x / y
f_8 = y / x

# Slightly more complicated functions mixing scalar constants,
# variables, and several operators.
g_1 = 10 * x + y / 2 + 1000
g_2 = -2 * x * x - 1 / y

# Exponential functions: variable base, variable exponent, and both.
h_1 = x**2
h_2 = 2**x
h_3 = x**y