Example #1
class SoftmaxPair(ActivationPair):
    neon_activation = Softmax()
    tolerance = 1e-6

    def reference_value(self, x):
        # softmax is shift-invariant; the constant "- 1" only keeps exp() small
        return np.exp(x - 1) / np.sum(np.exp(x - 1), axis=0, keepdims=True)

    def reference_derivative(self, x):
        # elementwise shortcut: f * (1 - f) is the diagonal of the softmax Jacobian
        f = self.reference_value(x)
        return f * (1.0 - f)
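A standalone NumPy check (not part of the original test module) of the two facts this reference implementation relies on: softmax is shift-invariant, so the "x - 1" form equals the usual softmax, and f * (1 - f) is the diagonal of the softmax Jacobian.

import numpy as np

x = np.random.randn(5, 1)

softmax = np.exp(x) / np.sum(np.exp(x), axis=0, keepdims=True)
shifted = np.exp(x - 1) / np.sum(np.exp(x - 1), axis=0, keepdims=True)
assert np.allclose(softmax, shifted)  # the shift cancels in the ratio

f = softmax[:, 0]
jacobian = np.diag(f) - np.outer(f, f)  # d softmax_i / d x_j
assert np.allclose(np.diag(jacobian), f * (1 - f))  # the "shortcut" derivative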
Example #2
def test_softmax_big_inputs(transformer_factory):
    """
    This fails with a memory error because the ex.derivative function
    attempts to compute the full derivative.

    Keeping this test since it was in the original neon.
    """
    inputs = np.random.random((1000, 128))
    outputs = (np.exp(inputs - 1) / np.sum(np.exp(inputs - 1)))
    outputs = outputs * (1 - outputs)  # shortcut only
    compare_tensors(Softmax(), inputs, outputs, deriv=True, tol=1e-6)
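A rough back-of-the-envelope (not from the original source) for why computing the full derivative here runs out of memory: a dense Jacobian over a (1000, 128) input has (1000 * 128)**2 entries, which in float64 is already over a hundred gibibytes.

n = 1000 * 128
print(n * n * 8 / 2**30)  # ~122 GiB for one dense float64 Jacobian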
Example #3
def test_softmax_derivative(transformer_factory):
    inputs = np.array([0, 1, -2], dtype=float).reshape((3, 1))
    outputs = (np.exp(inputs - 1) / np.sum(np.exp(inputs - 1)))
    outputs = outputs * (1 - outputs)  # shortcut only
    compare_tensors(Softmax(), inputs, outputs, deriv=True, tol=1e-6)
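For these concrete inputs the expected derivative can be checked directly with plain NumPy (an illustrative computation, not part of the test module):

import numpy as np

x = np.array([0.0, 1.0, -2.0]).reshape((3, 1))
f = np.exp(x - 1) / np.sum(np.exp(x - 1))
print((f * (1 - f)).ravel())  # ~[0.1922, 0.2078, 0.0339]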
Example #4
def test_softmax(transformer_factory):
    inputs = np.array([0, 1, -2]).reshape((3, 1))
    outputs = (np.exp(inputs - 1) / np.sum(np.exp(inputs - 1))).reshape((3, 1))
    compare_tensors(Softmax(), inputs, outputs, tol=1e-5)
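The forward values for the same inputs, again computed with plain NumPy outside the test harness:

import numpy as np

x = np.array([0.0, 1.0, -2.0]).reshape((3, 1))
ref = np.exp(x - 1) / np.sum(np.exp(x - 1))
print(ref.ravel())  # ~[0.2595, 0.7054, 0.0351]
print(ref.sum())    # 1.0 -- a softmax column always sums to one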