# Code example #1
def test_batchrenorm_mode_0_or_2_twice():
    """Stacking two renorm layers must train and predict without error.

    Regression test for issue #4881 with the old batch normalization
    functions in the Theano backend.
    """
    model = Sequential()
    # Two identical renorm layers, back to back, normalizing channel axis 1.
    for _ in range(2):
        model.add(normalization.BatchRenormalization(input_shape=(10, 5, 5),
                                                     axis=1))
    model.compile(loss='mse', optimizer='sgd')

    data = np.random.normal(loc=5.0, scale=10.0, size=(20, 10, 5, 5))
    model.fit(data, data, epochs=1, verbose=0)
    model.predict(data)
# Code example #2
def test_shared_batchrenorm():
    """A single BatchRenormalization layer must be shareable across
    different data streams, both at layer level and at model level.
    """
    # Layer-level reuse: call the same layer instance on two inputs.
    shared = normalization.BatchRenormalization(input_shape=(10, ))
    x1 = Input(shape=(10, ))
    shared(x1)

    x2 = Input(shape=(10, ))
    y2 = shared(x2)

    data = np.random.normal(loc=5.0, scale=10.0, size=(2, 10))
    model = Model(x2, y2)
    assert len(model.updates) == 5
    model.compile('sgd', 'mse')
    model.train_on_batch(data, data)

    # Model-level reuse: call the whole model on a fresh input tensor.
    x3 = Input(shape=(10, ))
    y3 = model(x3)
    new_model = Model(x3, y3)
    # NOTE(review): re-checks model.updates (not new_model.updates) — that
    # the count is unchanged by wrapping; confirm this matches the intent.
    assert len(model.updates) == 5
    new_model.compile('sgd', 'mse')
    new_model.train_on_batch(data, data)
# Code example #3
def test_batchrenorm_mode_1():
    """Mode-1 renorm output, with the affine transform undone, should have
    near-zero mean and unit std (or zero std for a constant input).

    Relies on module-level fixtures ``input_1``/``input_2``/``input_3``
    defined elsewhere in this file.
    """
    layer = normalization.BatchRenormalization(input_shape=(10, ), mode=1)
    layer.build(input_shape=(None, 10))

    for sample in [input_1, input_2, input_3]:
        # Strip off beta/gamma to inspect the raw normalized activations.
        normalized = (layer.call(K.variable(sample)) - layer.beta) / layer.gamma
        assert_allclose(K.eval(K.mean(normalized)), 0.0, atol=1e-1)
        # A constant input has zero variance and cannot reach unit std.
        expected_std = 1.0 if sample.std() > 0. else 0.0
        assert_allclose(K.eval(K.std(normalized)), expected_std, atol=1e-1)
# Code example #4
def test_batchrenorm_mode_0_convnet():
    """After training on 4-D data, per-channel activations (with beta/gamma
    undone) should be roughly zero-mean and unit-variance."""
    model = Sequential()
    layer = normalization.BatchRenormalization(axis=1, input_shape=(3, 4, 4),
                                               momentum=0.8)
    model.add(layer)
    model.compile(loss='mse', optimizer='sgd')

    # Input centered on 5.0 with variance 10.0.
    data = np.random.normal(loc=5.0, scale=10.0, size=(1000, 3, 4, 4))
    model.fit(data, data, epochs=4, verbose=0)

    # Undo the learned affine transform (broadcast over channel axis 1).
    predicted = model.predict(data)
    predicted -= np.reshape(K.eval(layer.beta), (1, 3, 1, 1))
    predicted /= np.reshape(K.eval(layer.gamma), (1, 3, 1, 1))

    assert_allclose(np.mean(predicted, axis=(0, 2, 3)), 0.0, atol=1e-1)
    assert_allclose(np.std(predicted, axis=(0, 2, 3)), 1.0, atol=1e-1)
# Code example #5
def test_batchrenorm_mode_0_or_2():
    """A dense renorm layer should learn to standardize its input."""
    # NOTE(review): the loop variable is never read — the body simply runs
    # twice; confirm whether `training` was meant to configure something.
    for training in [1, 0]:
        model = Sequential()
        layer = normalization.BatchRenormalization(input_shape=(10,),
                                                   momentum=0.8)
        model.add(layer)
        model.compile(loss='mse', optimizer='sgd')

        # Input centered on 5.0 with variance 10.0.
        data = np.random.normal(loc=5.0, scale=10.0, size=(1000, 10))
        model.fit(data, data, epochs=4, verbose=0)

        # Undo the learned affine transform before checking statistics.
        predicted = model.predict(data)
        predicted -= K.eval(layer.beta)
        predicted /= K.eval(layer.gamma)

        assert_allclose(predicted.mean(), 0.0, atol=1e-1)
        assert_allclose(predicted.std(), 1.0, atol=1e-1)
# Code example #6
def test_batchrenorm_clipping_schedule():
    """The clipping schedule must move off its initial r_max=1, d_max=0."""
    inputs = Input(shape=(10, ))
    layer = normalization.BatchRenormalization(t_delta=1.)
    model = Model(inputs, layer(inputs))
    model.compile('sgd', 'mse')

    x = np.random.normal(5, 10, size=(2, 10))
    y = np.random.normal(5, 10, size=(2, 10))

    # Before any training step the bounds sit at their initial values.
    assert K.get_value(layer.r_max) == 1
    assert K.get_value(layer.d_max) == 0

    for _ in range(10):
        model.train_on_batch(x, y)

    # After ten steps the schedule should have relaxed to roughly (3, 5).
    r_max, d_max = K.get_value(layer.r_max), K.get_value(layer.d_max)
    assert_allclose([r_max, d_max], [3, 5], atol=1e-1)
# Code example #7
def test_batchrenorm_get_config():
    """get_config must succeed on a model containing a renorm layer."""
    inputs = Input(shape=(10, ))
    outputs = normalization.BatchRenormalization()(inputs)
    Model(inputs, outputs).get_config()