Example #1
    def test_skip_linear_transform(self, BatchNormLayer):
        input_shape = (20, 30, 40)

        # random input tensor, beta, gamma
        input = (np.random.randn(*input_shape).astype(theano.config.floatX) +
                 np.random.randn(1, 30, 1).astype(theano.config.floatX))
        beta = np.random.randn(30).astype(theano.config.floatX)
        gamma = np.random.randn(30).astype(theano.config.floatX)

        # create layers without beta or gamma
        layer1 = BatchNormLayer(input_shape, beta=None, gamma=gamma)
        layer2 = BatchNormLayer(input_shape, beta=beta, gamma=None)

        # check that one parameter is missing
        assert len(layer1.get_params()) == 3
        assert len(layer2.get_params()) == 3

        # call get_output_for()
        result1 = layer1.get_output_for(theano.tensor.constant(input),
                                        deterministic=False).eval()
        result2 = layer2.get_output_for(theano.tensor.constant(input),
                                        deterministic=False).eval()

        # compute expected results
        mean = input.mean(axis=(0, 2))
        std = np.sqrt(input.var(axis=(0, 2)) + layer1.epsilon)
        exp_result = (input - mean[None, :, None]) / std[None, :, None]
        exp_result1 = exp_result * gamma[None, :, None]  # no beta
        exp_result2 = exp_result + beta[None, :, None]  # no gamma

        # compare expected results to actual results
        tol = {'atol': 1e-5, 'rtol': 1e-6}
        assert np.allclose(result1, exp_result1, **tol)
        assert np.allclose(result2, exp_result2, **tol)
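For context, a minimal usage sketch (not part of the test above) of disabling the learned shift in Lasagne's BatchNormLayer; it assumes the standard lasagne.layers API, and the layer names are illustrative:

    import lasagne

    # skip the learned shift (beta), e.g. because a following layer
    # applies its own bias; gamma and the running statistics remain
    l_in = lasagne.layers.InputLayer((None, 30, 40))
    l_bn = lasagne.layers.BatchNormLayer(l_in, beta=None)

    print([p.name for p in l_bn.get_params()])
    # expected: ['gamma', 'mean', 'inv_std']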
Example #2
    def test_batch_norm_tag(self, BatchNormLayer):
        input_shape = (20, 30, 40)
        layer = BatchNormLayer(input_shape)
        assert len(layer.get_params()) == 4
        stat_params = layer.get_params(batch_norm_stat=True)
        assert len(stat_params) == 2
        param_names = [p.name for p in stat_params]
        assert "mean" in param_names
        assert "inv_std" in param_names