Code example #1
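These snippets read like a pytest suite for a GroupNormalization Keras layer and are not self-contained: they rely on `tf`, `np`, `pytest`, and a `custom_objs` alias that is never defined in the excerpt. A minimal setup sketch is given below; the import behind `custom_objs` is an assumption (any module exposing the GroupNormalization implementation under test would do).

import numpy as np
import pytest
import tensorflow as tf

# Assumption: `custom_objs` stands for whichever module provides the
# GroupNormalization layer under test; TensorFlow Addons' normalization
# layers are one plausible source. Adjust the import to match the code base.
from tensorflow_addons.layers import normalizations as custom_objs
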
def test_regularizations():

    layer = custom_objs.GroupNormalization(
        gamma_regularizer='l1', beta_regularizer='l1', groups=4, axis=2)
    layer.build((None, 4, 4))
    assert (len(layer.losses) == 2)
    max_norm = tf.keras.constraints.max_norm
    layer = custom_objs.GroupNormalization(
        gamma_constraint=max_norm, beta_constraint=max_norm)
    layer.build((None, 3, 4))
    assert (layer.gamma.constraint == max_norm)
    assert (layer.beta.constraint == max_norm)
Code example #2
    def run_reshape_test(axis, group, input_shape, expected_shape):
        group_layer = custom_objs.GroupNormalization(groups=group, axis=axis)
        group_layer._set_number_of_groups_for_instance_norm(input_shape)

        inputs = np.ones(input_shape)
        tensor_input_shape = tf.convert_to_tensor(input_shape)
        reshaped_inputs, group_shape = group_layer._reshape_into_groups(
            inputs, (10, 10, 10), tensor_input_shape)
        for i in range(len(expected_shape)):
            assert (int(group_shape[i]) == expected_shape[i])
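The fragment above is indented because it sits inside an outer test function that is not shown. A hedged example of how the helper might be driven; the shapes are assumptions, and axis=1 is chosen so the expected grouped shape is unambiguous (the 10-element second axis splits into 2 groups of 5).

    # Hypothetical driver inside the same (unshown) outer test function: with
    # axis=1 and 2 groups, the grouped shape gains one extra dimension.
    run_reshape_test(axis=1, group=2, input_shape=(10, 10, 10),
                     expected_shape=(10, 2, 5, 10))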
Code example #3
def test_groupnorm_flat():
    # Check basic usage of groupnorm_flat
    # Testing for 1 == LayerNorm, 16 == GroupNorm, -1 == InstanceNorm

    groups = [-1, 16, 1]
    shape = (64,)
    for i in groups:
        model = _create_and_fit_Sequential_model(
            custom_objs.GroupNormalization(groups=i), shape)
        assert (hasattr(model.layers[0], 'gamma'))
        assert (hasattr(model.layers[0], 'beta'))
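Examples #3 and #5 call a helper, `_create_and_fit_Sequential_model`, that is not part of this excerpt. Below is a minimal sketch of what it plausibly does, inferred from how it is used (wrap the layer in a small Sequential model, fit for one epoch on random data, return the model); the Dense head, optimizer, and loss are assumptions.

def _create_and_fit_Sequential_model(layer, shape):
    # Wrap the layer under test in a small Sequential model and fit it once on
    # random data so that gamma/beta actually get built and updated.
    model = tf.keras.models.Sequential()
    model.add(layer)
    model.add(tf.keras.layers.Dense(32))
    model.add(tf.keras.layers.Dense(1))
    model.compile(optimizer=tf.keras.optimizers.RMSprop(0.01), loss='mse')
    x = np.random.rand(*((10,) + shape))
    y = np.random.rand(10, 1)
    model.fit(x=x, y=y, epochs=1)
    return model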
Code example #4
def test_apply_normalization():

    input_shape = (1, 4)
    expected_shape = (1, 2, 2)
    reshaped_inputs = tf.constant([[[2.0, 2.0], [3.0, 3.0]]])
    layer = custom_objs.GroupNormalization(groups=2, axis=1, scale=False, center=False)
    normalized_input = layer._apply_normalization(reshaped_inputs,
                                                  input_shape)
    assert (
        tf.reduce_all(
            tf.equal(normalized_input,
                     tf.constant([[[0.0, 0.0], [0.0, 0.0]]]))))
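The expected all-zero result can be verified by hand: with groups=2, each group ([2., 2.] and [3., 3.]) is constant, so subtracting the group mean gives exactly zero, and zero stays zero after dividing by sqrt(variance + epsilon). A stand-alone sanity check of that arithmetic (not part of the original suite):

# Each group is constant, so (x - mean(x)) == 0 and the normalized value is 0
# regardless of the variance/epsilon term.
for group in (np.array([2.0, 2.0]), np.array([3.0, 3.0])):
    assert np.all((group - group.mean()) / np.sqrt(group.var() + 1e-5) == 0.0)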
Code example #5
def test_initializer():
    # Check if the initializer for gamma and beta is working correctly

    layer = custom_objs.GroupNormalization(
        groups=32,
        beta_initializer='random_normal',
        beta_constraint='NonNeg',
        gamma_initializer='random_normal',
        gamma_constraint='NonNeg')

    model = _create_and_fit_Sequential_model(layer, (64,))

    weights = np.array(model.layers[0].get_weights())
    negative = weights[weights < 0.0]
    assert (len(negative) == 0)
Code example #6
def _test_specific_layer(inputs, axis, groups, center, scale):

    input_shape = inputs.shape

    # Get Output from Keras model
    layer = custom_objs.GroupNormalization(
        axis=axis, groups=groups, center=center, scale=scale)
    model = tf.keras.models.Sequential()
    model.add(layer)
    outputs = model.predict(inputs)
    assert not (np.isnan(outputs).any())

    # Create shapes
    if groups == -1:
        groups = input_shape[axis]
    np_inputs = inputs.numpy()
    reshaped_dims = list(np_inputs.shape)
    reshaped_dims[axis] = reshaped_dims[axis] // groups
    reshaped_dims.insert(1, groups)
    reshaped_inputs = np.reshape(np_inputs, tuple(reshaped_dims))

    # Calculate mean and variance
    mean = np.mean(
        reshaped_inputs,
        axis=tuple(range(2, len(reshaped_dims))),
        keepdims=True)
    variance = np.var(
        reshaped_inputs,
        axis=tuple(range(2, len(reshaped_dims))),
        keepdims=True)

    # Get gamma and beta initialized by the layer
    gamma, beta = layer._get_reshaped_weights(input_shape)
    if gamma is None:
        gamma = 1.0
    if beta is None:
        beta = 0.0

    # Get output from NumPy
    zeroed = reshaped_inputs - mean
    rsqrt = 1 / np.sqrt(variance + 1e-5)
    output_test = gamma * zeroed * rsqrt + beta

    # compare outputs
    output_test = np.reshape(output_test, input_shape.as_list())
    assert np.allclose(np.mean(output_test - outputs), 0., atol=1e-7, rtol=1e-7)
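`_test_specific_layer` is itself a helper, and the excerpt does not show how it is invoked. A hedged example of calls consistent with the reshaping logic above (random input, grouping the second axis; the shapes and flags are assumptions, not from the original source):

# Hypothetical invocations: group the second axis of a random 3-D input,
# once with 5 explicit groups and once in instance-norm mode (groups=-1).
_test_specific_layer(
    tf.random.normal((10, 10, 10)), axis=1, groups=5, center=True, scale=True)
_test_specific_layer(
    tf.random.normal((10, 10, 10)), axis=1, groups=-1, center=False, scale=False)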
Code example #7
def test_groupnorm_conv():
    # Check if Axis is working for CONV nets
    # Testing for 1 == LayerNorm, 5 == GroupNorm, -1 == InstanceNorm

    groups = [-1, 5, 1]
    for i in groups:
        model = tf.keras.models.Sequential()
        model.add(
            custom_objs.GroupNormalization(axis=1, groups=i, input_shape=(20, 20, 3)))
        model.add(tf.keras.layers.Conv2D(5, (1, 1), padding='same'))
        model.add(tf.keras.layers.Flatten())
        model.add(tf.keras.layers.Dense(1, activation='softmax'))
        model.compile(
            optimizer=tf.keras.optimizers.RMSprop(0.01), loss='mse')
        x = np.random.randint(1000, size=(10, 20, 20, 3))
        y = np.random.randint(1000, size=(10, 1))
        model.fit(x=x, y=y, epochs=1)
        assert (hasattr(model.layers[0], 'gamma'))
Code example #8
def test_axis_error():

    with pytest.raises(ValueError):
        custom_objs.GroupNormalization(axis=0)
Code example #9
def test_weights():
    # Check if weights get initialized correctly
    layer = custom_objs.GroupNormalization(groups=1, scale=False, center=False)
    layer.build((None, 3, 4))
    assert (len(layer.trainable_weights) == 0)
    assert (len(layer.weights) == 0)