Example No. 1
    def test_dim_None(self, RandomizedRectifierLayer):
        import lasagne
        l_in = lasagne.layers.input.InputLayer((None, 3, 28, 28))
        layer = RandomizedRectifierLayer(l_in)
        input = np.ones((3, 3, 28, 28)).astype(theano.config.floatX)
        # positive inputs pass through unchanged, even with a None batch dim
        out = layer.get_output_for(input).eval()
        assert np.allclose(out, 1.0)
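Each of these tests receives RandomizedRectifierLayer through a pytest fixture and relies on module-level imports of numpy (as np) and theano, none of which appear in the snippets. A plausible fixture definition, assumed here for context rather than taken from the examples:

import numpy as np
import pytest
import theano


class TestRandomizedRectifierLayer:
    @pytest.fixture
    def RandomizedRectifierLayer(self):
        # Return the layer class itself so each test can instantiate it.
        from lasagne.layers.special import RandomizedRectifierLayer
        return RandomizedRectifierLayer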
Example No. 2
    def test_get_output_for(self, RandomizedRectifierLayer):
        input_shape = (3, 3, 28, 28)

        # ensure the sampled slope always stays within [lower, upper)
        input = np.random.randn(*input_shape).astype(theano.config.floatX)
        layer = RandomizedRectifierLayer(input_shape, shared_axes=0)
        self.assert_between(layer, input, layer.get_output_for(input).eval())

        # from here on, we want to check parameter sharing
        # this is easier to check with a constant negative input (all -1)
        input = np.ones(input_shape).astype(theano.config.floatX) * -1

        # default: parameters shared along all but 2nd axis
        layer = RandomizedRectifierLayer(input_shape)
        out = layer.get_output_for(input).eval()
        assert [np.allclose(out.var(axis=a), 0)
                for a in range(4)] == [True, False, True, True]

        # share across all axes (single slope)
        layer = RandomizedRectifierLayer(input_shape, shared_axes='all')
        out = layer.get_output_for(input).eval()
        assert [np.allclose(out.var(axis=a), 0)
                for a in range(4)] == [True, True, True, True]

        # share across 1st axis
        layer = RandomizedRectifierLayer(input_shape, shared_axes=0)
        out = layer.get_output_for(input).eval()
        assert [np.allclose(out.var(axis=a), 0)
                for a in range(4)] == [True, False, False, False]

        # share across 1st and 4th axes
        layer = RandomizedRectifierLayer(input_shape, shared_axes=(0, 3))
        out = layer.get_output_for(input).eval()
        assert [np.allclose(out.var(axis=a), 0)
                for a in range(4)] == [True, False, False, True]
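The sharing behaviour exercised above carries over to normal use of the layer in a network. A minimal usage sketch, assuming only that Lasagne and Theano are installed (the shapes and the lower/upper bounds below are illustrative, not taken from the tests):

import numpy as np
import theano
import lasagne

# Tiny network: input layer followed by a randomized rectifier (RReLU).
l_in = lasagne.layers.InputLayer((None, 3, 28, 28))
l_rrelu = lasagne.layers.RandomizedRectifierLayer(l_in, lower=0.3, upper=0.8)

x = theano.tensor.tensor4('x')
# Stochastic output: negative units get a freshly sampled slope on each call.
train_out = lasagne.layers.get_output(l_rrelu, x)
# Deterministic output: negative units use the fixed mean slope (0.3 + 0.8) / 2.
test_out = lasagne.layers.get_output(l_rrelu, x, deterministic=True)

f = theano.function([x], [train_out, test_out])
data = -np.ones((2, 3, 28, 28), dtype=theano.config.floatX)
stochastic, deterministic = f(data)
print(deterministic.mean())   # approximately -0.55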
Example No. 3
    def test_deterministic(self, RandomizedRectifierLayer):
        input = np.ones((3, 3, 28, 28)).astype(theano.config.floatX) * -1
        layer = RandomizedRectifierLayer(input.shape, lower=0.4, upper=0.6)
        # deterministic mode applies the mean slope: (0.4 + 0.6) / 2 = 0.5
        out = layer.get_output_for(theano.tensor.constant(input),
                                   deterministic=True).eval()
        assert np.allclose(out, -0.5)
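The -0.5 checked above follows from how a randomized rectifier (RReLU) behaves: positive inputs pass through unchanged, negative inputs are multiplied by a slope drawn from [lower, upper), and in deterministic mode by the fixed mean slope (lower + upper) / 2. A rough NumPy sketch of that rule (the helper name and defaults are invented for illustration; this is not Lasagne's implementation):

import numpy as np

def rrelu_reference(x, lower=0.3, upper=0.8, deterministic=False, rng=None):
    # Hypothetical reference forward pass for a randomized rectifier.
    if deterministic or lower == upper:
        slope = (lower + upper) / 2.0                       # fixed mean slope
    else:
        rng = rng if rng is not None else np.random
        slope = rng.uniform(lower, upper, size=x.shape)     # per-unit slope
    return np.where(x > 0, x, x * slope)

x = -np.ones((2, 3), dtype='float32')
print(rrelu_reference(x, lower=0.4, upper=0.6, deterministic=True))  # all -0.5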
Example No. 4
    def test_low_eq_high(self, RandomizedRectifierLayer):
        input = np.ones((3, 3, 28, 28)).astype(theano.config.floatX) * -1
        layer = RandomizedRectifierLayer(input.shape, lower=0.5, upper=0.5)
        # with lower == upper the slope is fixed at 0.5, so -1 maps to -0.5
        out = layer.get_output_for(theano.tensor.constant(input)).eval()
        assert np.allclose(out, -0.5)
Example No. 5
    def test_nomod_positive(self, RandomizedRectifierLayer):
        input = np.ones((3, 3, 28, 28)).astype(theano.config.floatX)
        layer = RandomizedRectifierLayer(input.shape)
        # positive inputs are never modified by the rectifier
        out = layer.get_output_for(input).eval()
        assert np.allclose(out, 1.0)
Example No. 6
    def test_high_low(self, RandomizedRectifierLayer):
        # lower > upper is invalid and should be rejected at construction time
        with pytest.raises(ValueError):
            RandomizedRectifierLayer((None, 3, 28, 28), lower=0.9, upper=0.1)