Example #1
def where(condition: Tensor, t1: Tensor=None, t2: Tensor=None):
    """
    Numpy where function equivalent.

    ## Parameters:
    condition: `Tensor` - condition tensor

    t1: `Tensor` - tensor from which to take elements if condition is met, defaults to None
    
    t2: `Tensor` - tensor from which to take elements if condition is not met, defaults to None

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t1 = Tensor([1, 2, 3])
    t2 = Tensor([4, 5, 6])
    x = fn.where(t1 < 2, t1, t2)
    ```
    """
    t1 = to_tensor(t1) if t1 is not None else None
    t2 = to_tensor(t2) if t2 is not None else None
    data = np.where(condition.data, t1.data if t1 is not None else None, t2.data if t2 is not None else None)
    t1g = t1.requires_grad if t1 is not None else False
    t2g = t2.requires_grad if t2 is not None else False
    requires_grad = (t1g or t2g) and not Tensor.NO_GRAD
    nodes = []
    if t1g:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t1, df=lambda x: np.where(condition.data, x, np.zeros_like(x))))
    if t2g:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t2, df=lambda x: np.where(condition.data, np.zeros_like(x), x)))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
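A plain-NumPy sketch (independent of beacon) of what the two `df` lambdas above compute: the upstream gradient is routed to `t1` wherever the condition holds and to `t2` elsewhere.

```python
import numpy as np

cond = np.array([True, False, True])
g = np.array([0.1, 0.2, 0.3])                    # hypothetical upstream gradient

grad_t1 = np.where(cond, g, np.zeros_like(g))    # [0.1, 0. , 0.3]
grad_t2 = np.where(cond, np.zeros_like(g), g)    # [0. , 0.2, 0. ]
```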
Example #2
def mul(t1: Tensor, t2: Tensor):
    """
    Multiplies two tensors element-wise.

    ## Parameters:
    t1: `Tensor` - first tensor

    t2: `Tensor` - second tensor

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t1 = Tensor([1, 2, 3])
    t2 = Tensor([4, 5, 6])
    x = fn.mul(t1, t2)
    ```
    """
    data = t1.data * t2.data
    requires_grad = (t1.requires_grad or t2.requires_grad) and not Tensor.NO_GRAD
    nodes = []
    if t1.requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t1, df=lambda x: _broadcast(t1.grad.data, t2.data*x)))
    if t2.requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t2, df=lambda x: _broadcast(t2.grad.data, t1.data*x)))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
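The `df` lambdas encode the product rule: the gradient with respect to each operand is the upstream gradient times the other operand. A finite-difference sanity check in plain NumPy (it assumes nothing about beacon's API):

```python
import numpy as np

a = np.array([1.0, 2.0, 3.0])
b = np.array([4.0, 5.0, 6.0])
g = np.ones_like(a)                      # upstream gradient of sum(a * b)
eps = 1e-6

analytic = b * g                         # what the df lambda for t1 computes
numeric = np.array([
    (np.sum((a + eps * np.eye(3)[i]) * b) - np.sum((a - eps * np.eye(3)[i]) * b)) / (2 * eps)
    for i in range(3)
])
assert np.allclose(analytic, numeric)
```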
Example #3
def conv(x: Tensor, w: Tensor, stride: tuple, padding: tuple):
    """
    Performs convolution.

    ## Parameters
    x: `Tensor` - input tensor

    w: `Tensor` - filter

    stride: `tuple` - (vertical stride, horizontal stride)

    padding: `tuple` - (vertical padding, horizontal padding)

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    import numpy as np

    x = Tensor(data=np.random.rand(32, 1, 28, 28), requires_grad=True)
    w = Tensor(data=np.random.rand(10, 1, 5, 5), requires_grad=True)
    output = fn.conv(x, w, (1, 1), (0, 0))
    ```
    """
    data = _conv(x.data, w.data, stride, padding)
    requires_grad = (x.requires_grad or w.requires_grad) and not Tensor.NO_GRAD
    nodes = []
    if x.requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=x, df=lambda g: _conv_grad(0, g, x.data, w.data, stride, padding)))
    if w.requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=w, df=lambda g: _conv_grad(1, g, x.data, w.data, stride, padding)))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
Example #4
def power(t1: Tensor, t2: Tensor):
    """
    Raises the first tensor to the power of the second tensor.

    ## Parameters:
    t1: `Tensor` - first tensor

    t2: `Tensor` - second tensor

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t1 = Tensor([1, 4, 3])
    t2 = Tensor([2, 5, 6])
    x = fn.power(t1, t2)
    ```
    """
    data = np.power(t1.data, t2.data)
    requires_grad = (t1.requires_grad or t2.requires_grad) and not Tensor.NO_GRAD
    nodes = []
    if t1.requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t1, 
        df=lambda x: _broadcast(t1.data, x*t2.data*(t1.data**np.where(t2.data, t2.data-1, 1.)))))
    if t2.requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t2, 
        df=lambda x: _broadcast(t2.data, x * np.log(np.where(t1.data, t1.data, 1.)) * data)))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
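For f = t1 ** t2, the standard rules are df/dt1 = t2 · t1^(t2-1) and df/dt2 = ln(t1) · t1^t2; the `np.where` calls above guard the zero cases. A plain-NumPy finite-difference check of those formulas (positive bases and non-zero exponents assumed):

```python
import numpy as np

a = np.array([1.0, 4.0, 3.0])
b = np.array([2.0, 5.0, 6.0])
out = np.power(a, b)
eps = 1e-6

grad_a = b * a ** (b - 1)                # rule encoded by the df lambda for t1
grad_b = np.log(a) * out                 # rule encoded by the df lambda for t2

num_a = (np.power(a + eps, b) - np.power(a - eps, b)) / (2 * eps)
num_b = (np.power(a, b + eps) - np.power(a, b - eps)) / (2 * eps)
assert np.allclose(grad_a, num_a) and np.allclose(grad_b, num_b)
```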
Example #5
def minimum(t1: Tensor, t2: Tensor):
    """
    Element-wise minimum of two tensors.

    ## Parameters:
    t1: `Tensor` - first tensor

    t2: `Tensor` - second tensor

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t1 = Tensor([1, 4, 3])
    t2 = Tensor([2, 5, 6])
    x = fn.minimum(t1, t2)
    ```
    """
    data = np.minimum(t1.data, t2.data)
    requires_grad = (t1.requires_grad or t2.requires_grad) and not Tensor.NO_GRAD
    nodes = []
    def min_grad(x, z, y):
        return (x == z) / (1.0 + (x == y))
    if t1.requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t1, df=lambda x: _broadcast(t1.data, x * min_grad(t1.data, data, t2.data))))
    if t2.requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t2, df=lambda x: _broadcast(t2.data, x * min_grad(t2.data, data, t1.data))))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
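`min_grad` sends the gradient to whichever input equals the output and splits it evenly when both do (a tie), a common convention at the non-differentiable point. In plain NumPy:

```python
import numpy as np

a = np.array([1.0, 5.0, 3.0])
b = np.array([2.0, 5.0, 6.0])
out = np.minimum(a, b)

def min_grad(x, z, y):
    return (x == z) / (1.0 + (x == y))

print(min_grad(a, out, b))   # [1.  0.5 1. ] - a attains the minimum everywhere
print(min_grad(b, out, a))   # [0.  0.5 0. ] - b only shares the tied element
```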
Example #6
def matmul(t1: Tensor, t2: Tensor):
    """
    Matrix multiplication of two tensors.

    ## Parameters:
    t1: `Tensor` - first tensor

    t2: `Tensor` - second tensor

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t1 = Tensor([1, 2, 3])
    t2 = Tensor([4, 5, 6])
    x = fn.matmul(t1, t2)
    ```
    """
    data = np.matmul(t1.data, t2.data)
    requires_grad = (t1.requires_grad or t2.requires_grad) and not Tensor.NO_GRAD
    nodes = []
    if t1.requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t1, df=lambda x: np.matmul(x, t2.data.T)))
    if t2.requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t2, df=lambda x: np.matmul(t1.data.T, x)))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
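For 2-D operands the `df` lambdas implement the usual matrix-calculus rules dL/dA = G @ B.T and dL/dB = A.T @ G. A plain-NumPy spot check of one entry:

```python
import numpy as np

A = np.random.rand(2, 3)
B = np.random.rand(3, 4)
G = np.ones((2, 4))                      # upstream gradient of sum(A @ B)
eps = 1e-6

grad_A = G @ B.T                         # what the df lambda for t1 computes
dA = np.zeros_like(A); dA[0, 1] = eps
numeric = (np.sum((A + dA) @ B) - np.sum((A - dA) @ B)) / (2 * eps)
assert np.isclose(grad_A[0, 1], numeric)
```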
Example #7
def average_pool(x: Tensor, kernel: tuple, stride: tuple, padding: tuple):
    """
    Performs average pooling.

    ## Parameters
    x: `Tensor` - input tensor

    kernel: `tuple` - kernel shape

    stride: `tuple` - stride

    padding: `tuple` - padding

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    import numpy as np

    x = Tensor(data=np.random.rand(32, 10, 24, 24), requires_grad=True)
    sample = fn.average_pool(x, kernel=(2, 2), stride=(2, 2), padding=(0, 0))
    ```
    """
    data = _average_pool(x.data, kernel, stride, padding)
    requires_grad = x.requires_grad and not Tensor.NO_GRAD
    nodes = []
    if x.requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=x, df=lambda g: _average_pool_grad(g, x.data, kernel, stride, padding)))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
Example #8
def slice(t: Tensor, index):
    """
    Returns the values of the tensor at the given index.

    ## Parameters:
    t: `Tensor` - input tensor
    
    index: `tuple` - numpy-like index

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t = Tensor([[1, 1], [2, 2]])
    x = fn.slice(t, (0,...))
    ```
    """
    data = t.data[index]
    requires_grad = t.requires_grad and not Tensor.NO_GRAD
    nodes = []
    if requires_grad:
        def slice_grad(x):
            grad = np.zeros_like(t.data)
            grad[index] = x
            return grad
        nodes.append(Tensor.ComputationalGraphNode(tensor=t, df=lambda x: slice_grad(x)))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
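`slice_grad` scatters the upstream gradient back into a zero array of the original shape, so only the selected positions receive gradient:

```python
import numpy as np

data = np.array([[1.0, 1.0], [2.0, 2.0]])
index = (0, ...)                         # same index as in the docstring example
g = np.array([0.5, 0.7])                 # upstream gradient for the sliced values

grad = np.zeros_like(data)
grad[index] = g                          # what slice_grad builds
print(grad)                              # [[0.5 0.7]
                                         #  [0.  0. ]]
```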
Example #9
def max(t: Tensor, axis=None, keepdims=False):
    """
    Returns the max elements of the input tensor along the given axis.

    ## Parameters
    t: `Tensor` - input tensor

    axis: `int` - defaults to None

    keepdims: `bool` - defaults to False

    ## Example usage

    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t = Tensor([[1, 2, 3], [4, 5, 6]])
    x = fn.max(t, axis=1, keepdims=True)
    ```
    """
    data = np.max(t.data, axis=axis, keepdims=keepdims)
    requires_grad = t.requires_grad and not Tensor.NO_GRAD
    nodes = []
    if requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t, df=lambda x: _min_max_grad(x, data, t.data, axis, keepdims)))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
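`_min_max_grad` is a private helper not shown here; the conventional rule it stands in for routes the gradient only to the positions that attain the maximum along the reduced axis, splitting it on ties. A plain-NumPy sketch of that rule:

```python
import numpy as np

data = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
out = np.max(data, axis=1, keepdims=True)        # [[3.], [6.]]
g = np.ones_like(out)                            # upstream gradient

mask = (data == out)
grad = g * mask / mask.sum(axis=1, keepdims=True)
print(grad)   # [[0. 0. 1.]
              #  [0. 0. 1.]]
```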
Example #10
def mean(t: Tensor, axis=None, keepdims=False):
    """
    Numpy mean function equivalent.

    ## Parameters:
    t: `Tensor` - input tensor

    axis: `int` - defaults to None

    keepdims: `bool` - defaults to False

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t = Tensor([[1, 2, 3], [4, 5, 6]])
    x = fn.mean(t, axis=1)
    ```
    """
    data = np.mean(t.data, axis=axis, dtype=np.float64, keepdims=keepdims)
    requires_grad = t.requires_grad and not Tensor.NO_GRAD
    nodes = []
    if requires_grad:
        def mean_grad(x):
            g, n = _match_shape(x, np.shape(t.data), axis, keepdims)
            return g / n
        nodes.append(Tensor.ComputationalGraphNode(tensor=t, df=lambda x: mean_grad(x)))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
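`_match_shape` is a private helper; the net effect of `mean_grad` is to broadcast the upstream gradient back to the input shape and divide by the number of averaged elements. In plain NumPy, for the axis=1 case:

```python
import numpy as np

data = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
g = np.array([1.0, 1.0])                 # upstream gradient for mean over axis=1

n = data.shape[1]                        # 3 elements averaged per row
grad = np.broadcast_to(g[:, None], data.shape) / n
print(grad)                              # every entry is 1/3
```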
Example #11
def clip(t: Tensor, min_val, max_val):
    """
    Clips input tensor to minimum and maximum value.

    ## Parameters:
    t: `Tensor` - input tensor

    min_val: `float` - minimum value

    max_val: `float` - maximum value

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t = Tensor([2, 4, 6])
    x = fn.clip(t, 3, 8)
    ```
    """
    data = np.clip(t.data, min_val, max_val)
    requires_grad = t.requires_grad and not Tensor.NO_GRAD
    nodes = []
    if requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t, df=lambda x: x * np.logical_and(data != min_val, data != max_val)))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
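The gradient mask passes the upstream gradient through unclipped elements and blocks it where the output sits on a bound:

```python
import numpy as np

data = np.array([2.0, 4.0, 6.0])
clipped = np.clip(data, 3, 8)            # [3. 4. 6.]
g = np.ones_like(data)

mask = np.logical_and(clipped != 3, clipped != 8)
print(g * mask)                          # [0. 1. 1.] - no gradient through the clipped element
```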
Example #12
def sum(t: Tensor, axis=None, keepdims=False):
    """
    Sums all the elements of the tensor along the given axis.

    ## Parameters:
    t: `Tensor`

    axis: `int` - defaults to None

    keepdims: `bool` - defaults to False

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t = Tensor([1, 2, 3])
    x = fn.sum(t)
    ```
    """
    data = np.sum(t.data, axis=axis, keepdims=keepdims)
    requires_grad = t.requires_grad and not Tensor.NO_GRAD
    nodes = []
    if requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t, df=lambda x: _match_shape(x, t.data.shape, axis, keepdims)[0]))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
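Because every element contributes to the sum with weight 1, the backward pass just broadcasts the upstream gradient back to the input shape (which is what `_match_shape` is used for here). In plain NumPy, for the scalar-sum case:

```python
import numpy as np

data = np.array([1.0, 2.0, 3.0])
g = 1.0                                  # upstream gradient of the scalar sum

grad = np.broadcast_to(g, data.shape)
print(grad)                              # [1. 1. 1.]
```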
Example #13
def tan(t: Tensor):
    """
    Applies tan function to all the elements of the input tensor.

    ## Parameters:
    t: `Tensor` - input tensor

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t = Tensor([1, 2, 3])
    x = fn.tan(t)
    ```
    """
    data = np.tan(t.data)
    requires_grad = t.requires_grad and not Tensor.NO_GRAD
    nodes = []
    if requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t, df=lambda x: x / np.cos(t.data)**2))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
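The `df` lambda uses d tan(x)/dx = 1/cos²(x); a quick finite-difference check in plain NumPy:

```python
import numpy as np

x = np.array([1.0, 2.0, 3.0])
eps = 1e-6

analytic = 1.0 / np.cos(x) ** 2
numeric = (np.tan(x + eps) - np.tan(x - eps)) / (2 * eps)
assert np.allclose(analytic, numeric)
```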
Example #14
def neg(t: Tensor):
    """
    Unary negation of tensor elements.

    ## Parameters:
    t: `Tensor` - input tensor

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t = Tensor([1, 2, 3])
    x = -t
    ```
    """
    data = -t.data
    requires_grad = t.requires_grad and not Tensor.NO_GRAD
    nodes = []
    if requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t, df=lambda x: -x))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
Example #15
def reshape(t: Tensor, shape):
    """
    Reshapes tensor.

    ## Parameters:
    t: `Tensor` - input tensor

    shape: `tuple` - new shape

    ## Example usage
    ```python
    from beacon.tensor import Tensor
    from beacon.tensor import functions as fn
    t = Tensor([[1, 2, 3], [4, 5, 6]])
    x = fn.reshape(t, shape=(1, 6))
    ```
    """
    data = np.reshape(t.data, newshape=shape)
    requires_grad = t.requires_grad and not Tensor.NO_GRAD
    nodes = []
    if requires_grad:
        nodes.append(Tensor.ComputationalGraphNode(tensor=t, df=lambda x: np.reshape(x, np.shape(t.data))))
    return Tensor(data=data, requires_grad=requires_grad, nodes=nodes)
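Reshaping is a pure re-layout, so the backward pass just reshapes the upstream gradient back to the input's original shape:

```python
import numpy as np

data = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
g = np.ones((1, 6))                      # upstream gradient in the reshaped layout

grad = np.reshape(g, data.shape)         # what the df lambda does
print(grad.shape)                        # (2, 3)
```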