import numpy
import theano
import theano.tensor as T


def test_softmax():
    from keras.activations import softmax as s

    # Reference implementation of softmax, used to check the Keras op
    def softmax(values):
        m = max(values)
        values = numpy.array(values)
        e = numpy.exp(values - m)  # shift by the max for numerical stability
        dist = list(e / numpy.sum(e))
        return dist

    x = T.vector()
    exp = s(x)
    f = theano.function([x], exp)
    test_values = get_standard_values()

    result = f(test_values)
    expected = softmax(test_values)

    print(str(result))
    print(str(expected))
    list_assert_equal(result, expected)
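# The test above calls two module-level helpers that are not shown in this
# section. The sketch below is an assumption about their shape, not the
# exact originals: get_standard_values() only needs to return a small list
# of floats, and list_assert_equal() a pairwise approximate comparison.
def get_standard_values():
    # A small set of representative inputs for activation-function tests.
    return [0, 0.1, 0.5, 0.9, 1.0]


def list_assert_equal(list1, list2, round_to=7):
    # Pairwise equality check after rounding, to tolerate float noise.
    assert len(list1) == len(list2)
    for a, b in zip(list1, list2):
        assert round(a, round_to) == round(b, round_to)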
import numpy as np
from numpy.testing import assert_allclose
from keras import backend as K


# Backend-agnostic version of the same test, written as a method of a
# unittest-style test class (the class definition is omitted here).
def test_softmax(self):
    from keras.activations import softmax as s

    # Reference implementation of softmax, used to check the Keras op.
    # Note: np.max/np.sum reduce over the whole array, so this matches
    # Keras' row-wise softmax only when the batch has a single row.
    def softmax(values):
        m = np.max(values)
        e = np.exp(values - m)  # shift by the max for numerical stability
        return e / np.sum(e)

    x = K.placeholder(ndim=2)
    exp = s(x)
    f = K.function([x], [exp])
    test_values = get_standard_values()

    result = f([test_values])[0]
    expected = softmax(test_values)
    assert_allclose(result, expected, rtol=1e-05)
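# This version likewise assumes a module-level get_standard_values() helper.
# A minimal sketch under two assumptions: the return value must be 2D because
# the placeholder is ndim=2, and a single-row batch keeps the whole-array
# reference softmax above equivalent to the row-wise one; the exact values
# are illustrative, not the originals.
def get_standard_values():
    # One row of representative inputs, cast to the backend float type.
    return np.array([[0, 0.1, 0.5, 0.9, 1.0]], dtype=K.floatx())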