import numpy as np
from numpy.testing import assert_allclose

from keras import backend as K

# Note: get_standard_values() is a module-level helper in this test file that
# returns a small array of non-negative floats shared across activation tests.


def test_elu():
    from keras.layers.advanced_activations import ELU

    np.random.seed(1337)
    inp = get_standard_values()
    for alpha in [0.1, .5, -1., 1.]:
        layer = ELU(alpha=alpha)

        # Positive inputs pass through unchanged: elu(x) = x for x > 0.
        layer.input = K.variable(inp)
        for train in [True, False]:
            outp = K.eval(layer.get_output(train))
            assert_allclose(outp, inp, rtol=1e-3)

        # Negative inputs follow elu(x) = alpha * (exp(x) - 1).
        layer.input = K.variable(-inp)
        for train in [True, False]:
            outp = K.eval(layer.get_output(train))
            assert_allclose(outp, alpha * (np.exp(-inp) - 1.), rtol=1e-3)

        # The alpha hyperparameter must round-trip through the layer config.
        config = layer.get_config()
        assert config['alpha'] == alpha
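# For reference, ELU (Clevert et al., 2015) is defined piecewise as
#   elu(x) = x                     for x > 0
#   elu(x) = alpha * (exp(x) - 1)  for x <= 0,
# which is why the negative branch above expects alpha * (np.exp(-inp) - 1.).
# A minimal NumPy sketch of that reference, useful for sanity-checking the
# expected values (hypothetical helper, not part of the Keras test suite):
def _elu_reference(x, alpha):
    # np.where evaluates both branches elementwise and selects per element.
    return np.where(x > 0, x, alpha * (np.exp(x) - 1.))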