def test_relu(arg):
    """Gradient-check the relu activation, both as a direct reference and via name lookup."""
    for activation in (activations.relu, activations.get('relu')):
        check_gradients(activation, [arg])
def test_reshape_basic():
    """Gradient-check reshaping a random (3, 4, 5) tensor into (5, 12)."""
    tensor = tf.random.randn(3, 4, 5)
    check_gradients(lambda t: tf.reshape(t, (5, 12)), [tensor])
def test_transpose(arg):
    """Gradient-check tf.transpose on the fixture tensor."""
    check_gradients(tf.transpose, [arg])
def test_flatten_basic():
    """Gradient-check tf.flatten on a random rank-3 tensor."""
    tensor = tf.random.randn(3, 4, 5)
    check_gradients(tf.flatten, [tensor])
def test_reshape(arg):
    """Gradient-check reshaping the fixture tensor into a flat, inferred-length vector."""
    # FIX: the original passed (-1), which is just the int -1, not a
    # one-element tuple. Use (-1,) so the shape argument is a tuple,
    # consistent with the (5, 12) tuple used in test_reshape_basic.
    check_gradients(lambda x: tf.reshape(x, (-1,)), [arg])
def test_tanh(arg):
    """Gradient-check the tanh activation, both as a direct reference and via name lookup."""
    for activation in (activations.tanh, activations.get('tanh')):
        check_gradients(activation, [arg])
def test_dot():
    """Gradient-check tf.dot on a matrix-matrix product with compatible inner dims."""
    left = tf.random.randn(50, 11)
    right = tf.random.randn(11, 40)
    # NOTE(review): vector-vector / matrix-vector cases are not covered here.
    check_gradients(tf.dot, [left, right])
def test_square(arg):
    """Gradient-check elementwise tf.square."""
    check_gradients(tf.square, [arg])
def test_exp(arg):
    """Gradient-check elementwise tf.exp."""
    check_gradients(tf.exp, [arg])
def test_divide(arg_x, arg_y):
    """Gradient-check elementwise division, with loosened tolerances for the quotient rule."""
    def divide(numerator, denominator):
        return numerator / denominator

    check_gradients(divide, [arg_x, arg_y], atol=1e-2, rtol=1e-2)
def test_minimum(arg_x, arg_y):
    """Gradient-check tf.minimum, including the tie case minimum(x, x).

    The single-argument checks exercise the subgradient where both
    operands are equal; the final check exercises the general
    two-argument case.
    """
    check_gradients(lambda x: tf.minimum(x, x), [arg_x])
    check_gradients(lambda x: tf.minimum(x, x), [arg_y])
    # FIX: the original called tf.maximum here (apparent copy-paste),
    # so tf.minimum was never gradient-checked on two independent inputs.
    check_gradients(tf.minimum, [arg_x, arg_y])
def test_multiple(arg_x, arg_y):
    """Gradient-check elementwise multiplication of two tensors."""
    def multiply(left, right):
        return left * right

    check_gradients(multiply, [arg_x, arg_y])
def test_subtract(arg_x, arg_y):
    """Gradient-check elementwise subtraction of two tensors."""
    def subtract(left, right):
        return left - right

    check_gradients(subtract, [arg_x, arg_y])
def test_add(arg_x, arg_y):
    """Gradient-check elementwise addition of two tensors."""
    def add(left, right):
        return left + right

    check_gradients(add, [arg_x, arg_y])
def test_leakyrelu(arg):
    """Gradient-check the leakyrelu activation, both as a direct reference and via name lookup."""
    for activation in (activations.leakyrelu, activations.get('leakyrelu')):
        check_gradients(activation, [arg])
def test_negative(arg):
    """Gradient-check elementwise tf.negative."""
    check_gradients(tf.negative, [arg])
def test_sigmoid(arg):
    """Gradient-check the sigmoid activation, both as a direct reference and via name lookup."""
    for activation in (activations.sigmoid, activations.get('sigmoid')):
        check_gradients(activation, [arg])
def test_flatten(arg):
    """Gradient-check tf.flatten on the fixture tensor."""
    check_gradients(tf.flatten, [arg])
def test_linear(arg):
    """Gradient-check the linear (identity) activation.

    Covers the direct reference, the None lookup (default activation),
    and the 'linear' name lookup.
    """
    for activation in (
        activations.linear,
        activations.get(None),
        activations.get('linear'),
    ):
        check_gradients(activation, [arg])
def test_where(arg):
    """Gradient-check tf.where with a mask derived from the input itself."""
    # Both branches identical: gradient should flow regardless of the mask.
    check_gradients(lambda x: tf.where(x > 0, x, x), [arg])
    # Distinct branches: gradient must route per-element by the mask.
    check_gradients(lambda x: tf.where(x > 0, x, x * 2), [arg])