Example 1
def relu(x, name=None):
    """
    Rectified linear operation. Computes the element-wise rectified linear
    function of `x`: ``max(x, 0)``.

    The output tensor has the same shape as `x`.

    Example:
        >>> C.eval(C.relu([[-1, -0.5, 0, 1, 2]]))
        [array([[[ 0.,  0.,  0.,  1.,  2.]]])]
    
    Args:
        x: any :class:`cntk.graph.ComputationNode` that outputs a tensor
    Returns:
        :class:`cntk.graph.ComputationNode`
    """
    from cntk.ops.cntk2 import Relu
    op = Relu(x, name=name)
    # Wrap any raw NumPy inputs as graph nodes (helper assumed to be
    # imported at module level in the original source file).
    wrap_numpy_arrays(op)
    # ReLU is shape-preserving, so the output rank matches the input's.
    op.rank = op._.rank
    return op
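
For context, a minimal usage sketch follows, assuming the legacy CNTK 1.x Python API shown above, where `C` is the imported `cntk` package and `C.eval` performs a one-off forward pass; negative entries are clamped to zero while non-negative ones pass through unchanged.

import cntk as C

# Build a ReLU node over a small batch of values.
out = C.relu([[-1, -0.5, 0, 1, 2]])

# Evaluate the node; expected output: [array([[[0., 0., 0., 1., 2.]]])]
print(C.eval(out))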