def sum(t: Tensor, axis=None, keepdims=False):
    """
    Sums the elements of the input tensor along the given axis.

    ## Parameters:
    t: `Tensor` - input tensor
    axis: `int` - defaults to None
    keepdims: `bool` - defaults to False

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t = Tensor([1, 2, 3])
    x = fn.sum(t)
    ```
    """
    data = np.sum(t.data, axis=axis, keepdims=keepdims)
    requires_grad = t.requires_grad and not Tensor.NO_GRAD
    nodes = []
    if requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t,
            df=lambda x: _match_shape(x, t.data.shape, axis, keepdims)[0]))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
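The `_match_shape` node above encodes the usual backward rule for a reduction: every input element contributes to its sum with weight 1, so the upstream gradient is expanded back to the input's shape. A minimal NumPy sketch of that reverse broadcast (plain NumPy only, independent of the `_match_shape` helper's actual signature):

```python
import numpy as np

# Forward: reduce a (2, 3) array along axis=1.
a = np.array([[1., 2., 3.], [4., 5., 6.]])
s = np.sum(a, axis=1)              # shape (2,)

# Backward: an upstream gradient of shape (2,) is expanded back to (2, 3);
# every element that contributed to a given sum receives that sum's gradient.
upstream = np.array([10., 20.])
grad = np.broadcast_to(np.expand_dims(upstream, axis=1), a.shape)
print(grad)
# [[10. 10. 10.]
#  [20. 20. 20.]]
```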
def divide(t1: Tensor, t2: Tensor):
    """
    Divides two tensors element-wise.

    ## Parameters:
    t1: `Tensor` - first tensor
    t2: `Tensor` - second tensor

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t1 = Tensor([1, 2, 3])
    t2 = Tensor([4, 5, 6])
    x = fn.divide(t1, t2)
    ```
    """
    data = t1.data / t2.data
    requires_grad = (t1.requires_grad or t2.requires_grad) and not Tensor.NO_GRAD
    nodes = []
    if t1.requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t1,
            df=lambda x: _broadcast(t1.grad.data, x / t2.data)))
    if t2.requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t2,
            df=lambda x: _broadcast(t2.grad.data, -x * t1.data / t2.data**2)))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
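The two `df` lambdas are the quotient rule, ∂(a/b)/∂a = 1/b and ∂(a/b)/∂b = -a/b². A quick sanity check of those expressions against finite differences in plain NumPy (no beacon API involved):

```python
import numpy as np

a = np.array([1., 2., 3.])
b = np.array([4., 5., 6.])
upstream = np.ones_like(a)

# Analytic gradients used by the computational-graph nodes above.
grad_a = upstream / b
grad_b = -upstream * a / b**2

# Finite-difference check on the first element.
eps = 1e-6
num_a = ((a[0] + eps) / b[0] - a[0] / b[0]) / eps
num_b = (a[0] / (b[0] + eps) - a[0] / b[0]) / eps
assert np.isclose(grad_a[0], num_a, atol=1e-4)
assert np.isclose(grad_b[0], num_b, atol=1e-4)
```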
def forward(self, x):
    if self.train_mode:
        # Inverted dropout: zero each unit with probability dropout_rate and
        # scale the survivors by 1 / (1 - dropout_rate) so the expected
        # activation matches evaluation mode, where no mask is applied.
        mask = (np.random.rand(*x.shape) > self.dropout_rate) / (1 - self.dropout_rate)
        activation_mask = Tensor(data=mask, requires_grad=False)  # constant mask, no gradient needed
        x = fn.mul(x, activation_mask)
    return x
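As a side note on the scaling: with inverted dropout the kept activations are divided by 1 - dropout_rate during training, so the expected activation matches evaluation mode, where `forward` returns `x` untouched. A small NumPy check of that property (independent of the layer above):

```python
import numpy as np

rng = np.random.default_rng(0)
dropout_rate = 0.3
x = np.ones((10000,))

# Inverted dropout mask: ~30% zeros, survivors scaled by 1 / 0.7.
mask = (rng.random(x.shape) > dropout_rate) / (1 - dropout_rate)
dropped = x * mask

# The mean activation stays close to the no-dropout value of 1.0.
print(dropped.mean())   # ~1.0
```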
def tan(t: Tensor):
    """
    Applies the tangent function to all the elements of the input tensor.

    ## Parameters:
    t: `Tensor` - input tensor

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t = Tensor([1, 2, 3])
    x = fn.tan(t)
    ```
    """
    data = np.tan(t.data)
    requires_grad = t.requires_grad and not Tensor.NO_GRAD
    nodes = []
    if requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t,
            df=lambda x: x / np.cos(t.data)**2))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
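The node's `df` uses d tan(x)/dx = 1/cos²(x). A short finite-difference check of that rule in plain NumPy:

```python
import numpy as np

x = np.array([0.1, 0.5, 1.0])
upstream = np.ones_like(x)

# Analytic gradient used by the node above: 1 / cos(x)^2.
grad = upstream / np.cos(x)**2

# Finite-difference comparison.
eps = 1e-6
numeric = (np.tan(x + eps) - np.tan(x)) / eps
assert np.allclose(grad, numeric, atol=1e-4)
```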
def neg(t: Tensor):
    """
    Unary negation of tensor elements.

    ## Parameters:
    t: `Tensor` - input tensor

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t = Tensor([1, 2, 3])
    x = fn.neg(t)
    ```
    """
    data = -t.data
    requires_grad = t.requires_grad and not Tensor.NO_GRAD
    nodes = []
    if requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t, df=lambda x: -x))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
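Negation only flips the sign of the upstream gradient, which is exactly what `lambda x: -x` encodes; a one-line NumPy illustration:

```python
import numpy as np

upstream = np.array([0.1, 0.2, 0.3])
# d(-x)/dx = -1 for every element, so the gradient passed back is just -upstream.
print(-upstream)   # [-0.1 -0.2 -0.3]
```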
def to_tensor(x):
    """
    Converts the input parameter to a tensor if it isn't one already.

    ## Parameters:
    x: `Tensor-like` - input parameter

    ## Example usage
    ```python
    from beacon.tensor import functions as fn
    t = fn.to_tensor(10.0)
    ```
    """
    return Tensor._to_tensor(x)
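Since `to_tensor` delegates to `Tensor._to_tensor`, it can be called defensively at API boundaries; inputs that are already tensors come back as tensors too. A short usage sketch (the pass-through behaviour is assumed from the docstring above, not verified here):

```python
from beacon.tensor import Tensor
from beacon.tensor import functions as fn

a = fn.to_tensor([1.0, 2.0, 3.0])   # list -> Tensor
b = fn.to_tensor(10.0)              # scalar -> Tensor
c = fn.to_tensor(a)                 # already a Tensor, stays a Tensor

assert isinstance(a, Tensor) and isinstance(b, Tensor) and isinstance(c, Tensor)
```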
def reshape(t: Tensor, shape):
    """
    Reshapes the input tensor to the given shape.

    ## Parameters:
    t: `Tensor` - input tensor
    shape: `tuple` - new shape

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t = Tensor([[1, 2, 3], [4, 5, 6]])
    x = fn.reshape(t, shape=(1, 6))
    ```
    """
    data = np.reshape(t.data, newshape=shape)
    requires_grad = t.requires_grad and not Tensor.NO_GRAD
    nodes = []
    if requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t,
            df=lambda x: np.reshape(x, np.shape(t.data))))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
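The backward rule for `reshape` is itself a reshape: elements are only re-arranged, never mixed, so the upstream gradient is mapped back element-for-element onto the original shape. In plain NumPy:

```python
import numpy as np

a = np.array([[1., 2., 3.], [4., 5., 6.]])      # shape (2, 3)
out = np.reshape(a, (1, 6))

# An upstream gradient arrives with the output's shape and is mapped back
# element-for-element onto the input's shape.
upstream = np.arange(6, dtype=float).reshape(1, 6)
grad = np.reshape(upstream, np.shape(a))        # shape (2, 3)
print(grad)
# [[0. 1. 2.]
#  [3. 4. 5.]]
```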