def softmax(x, name=None):
    """
    Softmax operation. Squashes the input values `x` such that they add up to 1:

    :math:`softmax(x)_i = {\exp(x_i - m) \over \sum_{x_j \in x} \exp(x_j - m)}` where :math:`m = \max_{x_j \in x} x_j`

    The maximum input value :math:`m` is subtracted from every element before
    exponentiation for numerical stability; the shift cancels in the ratio, so
    the result is unchanged while :math:`\exp` is kept from overflowing.

    Example:
        >>> C.eval(C.softmax([[1, 1, 2, 3]]))
        [array([[[ 0.082595, 0.082595, 0.224515, 0.610296]]])]

        >>> C.eval(C.softmax([1, 1]))
        [array([[ 0.5, 0.5]])]

    Args:
        x: numpy array or any :class:`cntk.graph.ComputationNode` that outputs a tensor
        name (str): the name of the node in the network

    Returns:
        :class:`cntk.graph.ComputationNode`
    """
    from cntk.ops.cntk2 import Softmax
    op = Softmax(x, name=name)  # pass `name` through instead of silently dropping it
    # wrap_numpy_arrays and the `_` input shortcut are assumed to come from the
    # surrounding cntk.ops module, as for the other ops defined alongside this one.
    wrap_numpy_arrays(op)
    op.rank = op._.rank  # the output has the same rank as the wrapped input
    return op
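# A minimal standalone sketch in plain NumPy (not part of the CNTK API) of the
# numerically stable softmax described in the docstring above: shifting the
# inputs by their maximum before exponentiation cancels in the ratio but keeps
# exp() from overflowing for large inputs. The helper name is hypothetical.
import numpy as np

def _numpy_softmax_sketch(x):
    x = np.asarray(x, dtype=np.float64)
    e = np.exp(x - np.max(x))  # shift by the max: every exponent is <= 0
    return e / np.sum(e)

# _numpy_softmax_sketch([1, 1, 2, 3])
# -> array([ 0.082595,  0.082595,  0.224515,  0.610296]), matching the doctest above.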