def vjp(g):
    if axis is None:
        # If axis is None, np.repeat() repeats the flattened array.
        expanded = np.reshape(g, (np.prod(shape),) + (repeats,))
        return np.reshape(np.sum(expanded, axis=1, keepdims=False), shape)
    else:
        if shape[axis] == 1:
            # For this common case, the logic is simple.
            return np.sum(g, axis=axis, keepdims=True)
        else:
            expanded = np.reshape(
                g, shape[0 : axis + 1] + (repeats,) + shape[axis + 1 :]
            )
            return np.sum(expanded, axis=axis + 1, keepdims=False)
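# --- Illustrative sketch (not part of the library) --------------------------
# A plain-NumPy check of the reshape-and-sum trick above for np.repeat along
# an axis. `onp` is vanilla NumPy so it does not clash with the unumpy `np`
# used in this module; the values of x, repeats, axis and g are made up.
import numpy as onp

x = onp.arange(6.0).reshape(2, 3)
repeats, axis = 4, 1
g = onp.ones((2, 3 * repeats))  # cotangent shaped like onp.repeat(x, repeats, axis)

# Group the repeated copies into their own axis, then sum them away.
expanded = onp.reshape(g, x.shape[: axis + 1] + (repeats,) + x.shape[axis + 1 :])
vjp_x = onp.sum(expanded, axis=axis + 1)

print(vjp_x.shape)                   # (2, 3)
print(onp.allclose(vjp_x, repeats))  # True: each input element received `repeats` ones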
def repeat_to_match_shape(g, shape, dtype, axis, keepdims):
    """Returns the array g repeated along axis to fit vector space vs.

    Also returns the number of repetitions of the array.
    """
    with ua.set_backend(numpy_backend, coerce=True):
        if shape == ():
            return g, 1
        axis = list(axis) if isinstance(axis, tuple) else axis
        new_shape = np.array(shape, dtype=int)
        new_shape[axis] = 1
        num_reps = np.prod(np.array(shape)[axis])
        return np.broadcast_to(np.reshape(g, new_shape), shape), num_reps
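# --- Illustrative sketch (not part of the library) --------------------------
# How a reduction VJP uses the same broadcast-back trick: the cotangent of
# onp.sum(x, axis=1) is simply copied across the reduced axis. `onp` is
# vanilla NumPy; all values below are made up for the example.
import numpy as onp

x = onp.arange(12.0).reshape(3, 4)
axis = 1
g = onp.array([1.0, 2.0, 3.0])  # cotangent of onp.sum(x, axis=1), shape (3,)

new_shape = list(x.shape)
new_shape[axis] = 1             # keep the reduced axis with length 1
vjp_x = onp.broadcast_to(onp.reshape(g, new_shape), x.shape)

print(vjp_x.shape)  # (3, 4)
print(vjp_x[1])     # [2. 2. 2. 2.] -- the gradient is repeated along the reduced axis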
def to(self, x, grad_variables=None, jacobian=False):
    """
    Calculate the VJP or Jacobian matrix of self with respect to x.

    Parameters
    ----------
    x : VJPDiffArray
        The variable to differentiate with respect to (the denominator of the derivative).
    grad_variables : VJPDiffArray
        Gradient of the numerator of the derivative.
    jacobian : bool
        Whether to calculate the Jacobian matrix. If set to ``True``,
        the full Jacobian matrix is returned instead of the VJP.

    Examples
    --------
    >>> with ua.set_backend(udiff.DiffArrayBackend(numpy_backend), coerce=True):
    ...
    ...     x1 = np.array([2])
    ...     x2 = np.array([5])
    ...     y = np.log(x1) + x1 * x2 - np.sin(x2)
    ...     x1_diff = y.to(x1)
    ...     print(np.allclose(x1_diff.value, [5.5]))
    True
    """
    if jacobian:
        if x._jacobian is None or self not in x._jacobian:
            for position in itertools.product(*[range(i) for i in np.shape(self)]):
                grad_variables = np.zeros_like(self.value)
                grad_variables.value[position] = 1
                self._backward_jacobian(grad_variables, self, position, x)
            x._jacobian[self] = np.reshape(
                np.stack(x._jacobian[self].values()),
                np.shape(self) + np.shape(x),
            )
        return x._jacobian[self]
    else:
        if x._diff is None or self not in x._diff:
            self._backward(grad_variables, self, x)
        return x._diff[self]
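# --- Illustrative sketch (not part of the library) --------------------------
# The jacobian=True branch above builds the full Jacobian by seeding a
# one-hot cotangent for every output position and stacking the VJPs. The toy
# function f and its hand-written VJP below are hypothetical stand-ins for
# the udiff machinery; `onp` is vanilla NumPy.
import itertools
import numpy as onp


def f(x):
    return onp.sin(x) * x  # elementwise toy function


def f_vjp(x, g):
    return g * (onp.cos(x) * x + onp.sin(x))  # hand-written VJP of f


x = onp.array([0.5, 1.0, 2.0])
y = f(x)

rows = []
for position in itertools.product(*[range(i) for i in y.shape]):
    g = onp.zeros_like(y)
    g[position] = 1.0  # one-hot cotangent selecting this output entry
    rows.append(f_vjp(x, g))

jac = onp.reshape(onp.stack(rows), y.shape + x.shape)
print(onp.allclose(jac, onp.diag(onp.cos(x) * x + onp.sin(x))))  # True: diagonal Jacobian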
def grad_reshape_list(ans, *arys):
    if len(arys) > 1:
        raise NotImplementedError("Can't handle multiple arguments yet.")
    return lambda g: np.reshape(g, np.shape(arys[0]))
def vjp(g):
    if axis:
        return reverse_axis(np.cumsum(reverse_axis(g, axis), axis), axis)
    else:
        return np.reshape(np.cumsum(g[::-1], axis)[::-1], x.shape)
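# --- Illustrative sketch (not part of the library) --------------------------
# Numerical check of the reverse-cumsum-reverse identity used above: the
# adjoint of cumsum along an axis is cumsum applied to the reversed
# cotangent, reversed back. `onp` is vanilla NumPy, and reverse_axis is a
# local re-implementation written just for this sketch.
import numpy as onp


def reverse_axis(a, axis):
    idx = [slice(None)] * a.ndim
    idx[axis] = slice(None, None, -1)
    return a[tuple(idx)]


g = onp.random.default_rng(0).standard_normal((4, 5))
axis = 1

vjp = reverse_axis(onp.cumsum(reverse_axis(g, axis), axis), axis)

# Compare against the explicit Jacobian-transpose product for 1-D cumsum:
# d cumsum(x)[i] / d x[j] = 1 if j <= i else 0, i.e. a lower-triangular matrix.
J = onp.tril(onp.ones((g.shape[axis], g.shape[axis])))
expected = g @ J  # row-wise g^T J along the last axis
print(onp.allclose(vjp, expected))  # True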
def vjp(g):
    for axis, rep in enumerate(reps):
        g = sum(np.split(g, rep, axis))
    return np.reshape(g, x_shape)
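# --- Illustrative sketch (not part of the library) --------------------------
# The split-and-sum rule for np.tile: every tile sees the same input, so the
# input's gradient is the sum of the cotangent over all tiles. `onp` is
# vanilla NumPy; the shapes below are made up for the example.
import numpy as onp

x = onp.arange(6.0).reshape(2, 3)
reps = (2, 3)         # tile twice along axis 0 and three times along axis 1
g = onp.ones((4, 9))  # cotangent shaped like onp.tile(x, reps)

for axis, rep in enumerate(reps):
    g = sum(onp.split(g, rep, axis))  # collapse the copies along this axis

vjp_x = onp.reshape(g, x.shape)
print(vjp_x)  # every entry equals 2 * 3 = 6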
defvjp(np.arctanh, lambda ans, x: lambda g: g / (1 - x ** 2))
defvjp(np.rad2deg, lambda ans, x: lambda g: g / np.pi * 180.0)
defvjp(np.degrees, lambda ans, x: lambda g: g / np.pi * 180.0)
defvjp(np.deg2rad, lambda ans, x: lambda g: g * np.pi / 180.0)
defvjp(np.radians, lambda ans, x: lambda g: g * np.pi / 180.0)
defvjp(np.square, lambda ans, x: lambda g: g * 2 * x)
defvjp(np.sqrt, lambda ans, x: lambda g: g * 0.5 * x ** -0.5)
defvjp(
    np.sinc,
    lambda ans, x: lambda g: g
    * (np.cos(np.pi * x) * np.pi * x - np.sin(np.pi * x))
    / (np.pi * x ** 2),
)
defvjp(
    np.reshape,
    lambda ans, x, shape, order=None: lambda g: np.reshape(
        g, np.shape(x), order=order
    ),
)
defvjp(
    np.roll, lambda ans, x, shift, axis=None: lambda g: np.roll(g, -shift, axis=axis)
)
defvjp(
    np.array_split,
    lambda ans, ary, idxs, axis=0: lambda g: np.concatenate(g, axis=axis),
)
defvjp(np.split, lambda ans, ary, idxs, axis=0: lambda g: np.concatenate(g, axis=axis))
defvjp(np.vsplit, lambda ans, ary, idxs: lambda g: np.concatenate(g, axis=0))
defvjp(np.hsplit, lambda ans, ary, idxs: lambda g: np.concatenate(g, axis=1))
defvjp(np.dsplit, lambda ans, ary, idxs: lambda g: np.concatenate(g, axis=2))
defvjp(
    np.ravel,
    lambda ans, x, order=None: lambda g: np.reshape(g, np.shape(x), order=order),
)
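# --- Illustrative sketch (not part of the library) --------------------------
# A central finite-difference sanity check for one of the rules above
# (np.arctanh). The step size and test points are arbitrary; `onp` is
# vanilla NumPy so it does not clash with the unumpy `np` used here.
import numpy as onp

x = onp.array([0.1, 0.3, -0.5])
g = onp.array([1.0, 2.0, 3.0])
eps = 1e-6

analytic = g / (1 - x ** 2)  # the arctanh rule registered above
numeric = g * (onp.arctanh(x + eps) - onp.arctanh(x - eps)) / (2 * eps)
print(onp.allclose(analytic, numeric, atol=1e-8))  # True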
    (DaskBackend, np.lexsort),
    (DaskBackend, np.partition),
    (DaskBackend, np.argpartition),
    (DaskBackend, np.sort_complex),
    (DaskBackend, np.msort),
    (DaskBackend, np.searchsorted),
}


@pytest.fixture(scope="session", params=LIST_BACKENDS)
def backend(request):
    backend = request.param
    return backend


x = np.reshape(np.arange(25), (5, 5))


@pytest.mark.parametrize(
    "method, y_d",
    [
        (np.positive, lambda x: 1),
        (np.negative, lambda x: -1),
        (np.exp, lambda x: pow(e, x)),
        (np.exp2, lambda x: pow(2, x) * log(2)),
        (np.log, lambda x: 1 / x),
        (np.log2, lambda x: 1 / (x * log(2))),
        (np.log10, lambda x: 1 / (x * log(10))),
        (np.sqrt, lambda x: 0.5 * pow(x, -0.5)),
        (np.square, lambda x: 2 * x),
        (np.cbrt, lambda x: 1 / 3 * pow(x, -2 / 3)),
        (np.reciprocal, lambda x: -1 / pow(x, 2)),
        (np.sin, lambda x: cos(x)),
        (np.cos, lambda x: -sin(x)),
        (np.tan, lambda x: 1 / cos(x) ** 2),
        (np.arcsin, lambda x: 1 / sqrt(1 - x ** 2)),
        (np.arccos, lambda x: -1 / sqrt(1 - x ** 2)),
        (np.arctan, lambda x: 1 / (1 + x ** 2)),
def to(self, x, grad_variables=None, jacobian=False):
    """
    Calculate the JVP or Jacobian matrix of self with respect to x.

    Parameters
    ----------
    x : JVPDiffArray
        The variable to differentiate with respect to (the denominator of the derivative).
    grad_variables : JVPDiffArray
        Gradient (tangent) assigned to x.
    jacobian : bool
        Whether to calculate the Jacobian matrix. If set to ``True``,
        the full Jacobian matrix is returned instead of the JVP.

    Examples
    --------
    >>> with ua.set_backend(udiff.DiffArrayBackend(numpy_backend, mode="jvp"), coerce=True):
    ...
    ...     x1 = np.array([2])
    ...     x2 = np.array([5])
    ...     y = np.log(x1) + x1 * x2 - np.sin(x2)
    ...     x1_diff = y.to(x1)
    ...     print(np.allclose(x1_diff, [5.5]))
    True
    """
    if self._jvp and x not in self._jvp:
        raise ValueError("Please check if the base is correct.")
    if jacobian:
        if self._jacobian is None:
            self._jacobian = {}
        if x not in self._jacobian:
            self._jacobian[x] = {}
            for position in itertools.product(*[range(i) for i in np.shape(x)]):
                grad_variables = np.zeros_like(x)
                grad_variables.value[position] = 1
                self._jacobian[x][position] = self._forward(x, grad_variables)
            old_axes = tuple(range(np.ndim(self) + np.ndim(x)))
            new_axes = old_axes[np.ndim(x):] + old_axes[:np.ndim(x)]
            self._jacobian[x] = np.transpose(
                np.reshape(
                    np.stack(self._jacobian[x].values()),
                    np.shape(x) + np.shape(self),
                ),
                new_axes,
            )
        return self._jacobian[x]
    else:
        if self._diff is None:
            self._diff = {}
        if x not in self._diff:
            if grad_variables is None:
                grad_variables = np.ones_like(self)
            self._diff[x] = self._forward(x, grad_variables)
        return self._diff[x]
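# --- Illustrative sketch (not part of the library) --------------------------
# Forward-mode analogue of the one-hot seeding above: a Jacobian can be
# assembled by seeding a one-hot tangent for every *input* position and
# stacking the resulting JVPs. f and f_jvp are hypothetical stand-ins for
# the udiff machinery; `onp` is vanilla NumPy.
import itertools
import numpy as onp


def f(x):
    return onp.array([x[0] * x[1], onp.sin(x[1])])


def f_jvp(x, v):
    # Directional derivative of f at x along the tangent v.
    return onp.array([v[0] * x[1] + x[0] * v[1], onp.cos(x[1]) * v[1]])


x = onp.array([2.0, 5.0])
cols = []
for position in itertools.product(*[range(i) for i in x.shape]):
    v = onp.zeros_like(x)
    v[position] = 1.0  # one-hot tangent selecting this input entry
    cols.append(f_jvp(x, v))

# Stacking over inputs yields shape x.shape + f(x).shape; move the output
# axes to the front so the result is indexed as jac[output, input].
jac = onp.reshape(onp.stack(cols), x.shape + f(x).shape)
jac = onp.transpose(jac, (1, 0))
print(onp.allclose(jac, [[5.0, 2.0], [0.0, onp.cos(5.0)]]))  # True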