import theano
import theano.tensor as T


def test_relu():
    '''
    The ReLU implementation doesn't depend on the value being a Theano
    variable. Tests ints, floats and Theano tensors.
    '''
    from keras.activations import relu as r

    # Plain Python scalars: negatives clamp to zero, non-negatives pass
    # through unchanged.
    assert r(5) == 5
    assert r(-5) == 0
    assert r(-0.1) == 0
    assert r(0.1) == 0.1

    # Symbolic path: build a Theano expression and compile it.
    x = T.vector()
    exp = r(x)
    f = theano.function([x], exp)

    test_values = get_standard_values()
    result = f(test_values)
    # Equal because there are no negatives in the test values.
    list_assert_equal(result, test_values)
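# `get_standard_values` and `list_assert_equal` are helpers defined elsewhere
# in the test module. A minimal sketch of what the Theano-era test above
# assumes they look like (hypothetical reconstructions, not the actual Keras
# definitions):

import numpy as np


def get_standard_values():
    # Non-negative floats only, so ReLU is expected to return them unchanged.
    return np.array([0.0, 0.1, 0.5, 0.9, 1.0], dtype='float32')


def list_assert_equal(actual, expected, tolerance=1e-5):
    # Element-wise comparison with a small relative tolerance.
    np.testing.assert_allclose(np.ravel(actual), np.ravel(expected),
                               rtol=tolerance)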
def test_relu(self): """ Relu implementation doesn't depend on the value being a theano variable. Testing ints, floats and theano tensors. """ from keras.activations import relu as r x = K.placeholder(ndim=2) exp = r(x) f = K.function([x], [exp]) test_values = get_standard_values() result = f([test_values])[0] # because no negatives in test values assert_allclose(result, test_values, rtol=1e-05)
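# The backend version feeds a batch through `K.placeholder(ndim=2)`, so
# `get_standard_values` must return a 2-D array there. A hypothetical variant
# matching that shape (an assumption, not the actual Keras helper):

import numpy as np


def get_standard_values():
    # A single row of non-negative floats; ReLU should return it unchanged.
    return np.array([[0.0, 0.1, 0.5, 0.9, 1.0]], dtype='float32')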