Example #1
def test_optimization():
    op = CrossMapNorm(16, 15./16., 1, True)
    x_ = theano.tensor.TensorVariable(CudaNdarrayType([False] * 4))
    f = theano.function([x_], theano.grad(op(x_)[0].sum(), x_))
    nodes = [x for x in f.maker.fgraph.apply_nodes
             if type(x.op) == CrossMapNormUndo]
    assert len(nodes) == 1
    assert nodes[0].op.inplace
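These test snippets (here and in Examples #2, #5 and #9) omit their module-level imports. A plausible preamble, assuming the legacy Theano CUDA backend and pylearn2's sandbox layout, would be:

import numpy
import theano
# Old GPU backend types and ops (assumed import locations):
from theano.sandbox.cuda import CudaNdarray, CudaNdarrayType
from theano.sandbox.cuda.basic_ops import gpu_contiguous, gpu_from_host
from theano.tests.unittest_tools import verify_grad
from pylearn2.sandbox.cuda_convnet.response_norm import (CrossMapNorm,
                                                         CrossMapNormUndo)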
Example #2
def test_cross_map_norm_grad_simple():
    rng = numpy.random.RandomState([2013, 2, 10])
    op = CrossMapNorm(16, 15/16., 1, True)
    make_graph = lambda inp: op(gpu_from_host(inp))[0]
    verify = lambda array: verify_grad(make_graph, [array])
    inputs = [numpy.ones((16, 1, 1, 1), dtype='float32'),
              rng.normal(size=(32, 5, 5, 10)).astype('float32')]
    for arr in inputs:
        yield verify, arr
Example #3
    def __init__(self, input_layer, alpha=1e-4, beta=0.75, size_f=5, blocked=True):
        self.alpha = alpha
        self.beta = beta
        self.size_f = size_f
        self.blocked = blocked
        self.input_layer = input_layer
        self.params = []
        self.bias_params = []
        self.mb_size = self.input_layer.mb_size

        self.norm_op = CrossMapNorm(size_f=size_f, add_scale=alpha, pow_scale=beta, blocked=blocked)
Example #4
    def __init__(self, alpha=1e-4, beta=0.75, size_f=5, blocked=True):
        """
        .. todo::

            WRITEME properly

        I kept the same parameter names where I was sure they
        actually are the same parameters (with respect to
        CrossChannelNormalization).
        """
        self._op = CrossMapNorm(size_f=size_f,
                                add_scale=alpha,
                                pow_scale=beta,
                                blocked=blocked)
Example #5
def test_cross_map_norm_noncontiguous_grad():
    # Check the case reported at https://groups.google.com/d/topic/pylearn-users/KxIYc3hczf4/discussion
    x = cuda_ftensor4('x')
    x_shuffled = x.dimshuffle(1, 2, 3, 0)
    x_shuffled = gpu_contiguous(x_shuffled)
    response_norm = CrossMapNorm(
            size_f=16, add_scale=(15. / 16.), pow_scale=1, blocked=True)
    output_shuffled = response_norm(x_shuffled)[0]
    output = output_shuffled.dimshuffle(3, 0, 1, 2)
    cost = output.sum()
    cost.name = 'cost'
    grad_x = theano.grad(cost, x)
    f = theano.function([x], grad_x, mode=mode_with_gpu)
    x_val = CudaNdarray(numpy.ones((2, 16, 2, 2), dtype='float32'))
    f(x_val)
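cuda_ftensor4 and mode_with_gpu are helpers defined elsewhere in the test module and not shown on this page; plausible stand-ins (assumptions, not the original definitions) are:

from theano.sandbox.cuda import CudaNdarrayType
# A callable type instance: cuda_ftensor4('x') returns a symbolic float32 GPU 4-tensor
cuda_ftensor4 = CudaNdarrayType((False,) * 4)
# Compilation mode with the GPU optimizations enabled
mode_with_gpu = theano.compile.get_default_mode().including('gpu')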
Example #6
    def init(self, input, input_shape, normSizeAlphaBeta):
        """
        Allocate a cross-map response normalization layer (no trainable parameters).

        :type normSizeAlphaBeta: tuple or list of size 3
        :param normSizeAlphaBeta: (size, alpha, beta)
        """

        self.input = gpu_contiguous(input)
        #NORM
        norm_op = CrossMapNorm(size_f=normSizeAlphaBeta[0],
                               add_scale=normSizeAlphaBeta[1],
                               pow_scale=normSizeAlphaBeta[2],
                               blocked=True)
        self.output = norm_op(self.input)[0]

        self.input_shape = input_shape
        self.output_shape = input_shape

        self.params = []
Example #7
    def cross_channel_norm(self, X, shape=None, alpha=1e-4, beta=0.75, n=5):
        from pylearn2.sandbox.cuda_convnet.response_norm import CrossMapNorm
        from theano.sandbox.cuda.basic_ops import gpu_contiguous
        # from theano.sandbox.cuda import gpu_from_host, host_from_gpu

        #shape: BCF01
        input_C01BF = X.dimshuffle(1, 3, 4, 0, 2)  #C01BF
        # s = input_C01BF.shape
        # shape_C01B = T.cast(s[:-2]+(s[-2]*s[-1],), 'int32')

        shape_C01B = (
            shape[1],
            shape[3],
            shape[4],
            shape[0] * shape[2],
        )
        shape_C01BF = (
            shape[1],
            shape[3],
            shape[4],
            shape[0],
            shape[2],
        )

        input_C01B = input_C01BF.reshape(shape_C01B)  #C01B

        op = CrossMapNorm(size_f=n,
                          add_scale=alpha,
                          pow_scale=beta,
                          blocked=True)

        # return host_from_gpu(op(gpu_from_host(X))[0])
        out = op(gpu_contiguous(input_C01B))[0]
        # out = host_from_gpu(op(gpu_from_host(input_C01B))[0])

        out = out.reshape(shape_C01BF)  #C01BF
        out = out.dimshuffle(3, 0, 4, 1, 2)  #BCF01

        return out
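To make the layout juggling above concrete, here is a hypothetical shape walk-through (the numbers are illustrative and not from the original code):

# X (BCF01): (B, C, F, 0, 1) = (8, 16, 4, 12, 12)
# X.dimshuffle(1, 3, 4, 0, 2)  -> C01BF: (16, 12, 12, 8, 4)
# reshape(shape_C01B)          -> C01B:  (16, 12, 12, 32)   # frame axis folded into batch
# CrossMapNorm normalizes across the 16 channels, after which the output is
# reshaped back to (16, 12, 12, 8, 4) and dimshuffled to BCF01: (8, 16, 4, 12, 12)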
Example #8
    def __init__(self, alpha=1e-4, beta=0.75, size_f=5, blocked=True):
        self._op = CrossMapNorm(size_f=size_f,
                                add_scale=alpha,
                                pow_scale=beta,
                                blocked=blocked)
Example #9
def test_cross_map_norm_simple():
    op = CrossMapNorm(16, 15. / 16., 1., True)
    x = CudaNdarray(numpy.ones((16, 2, 2, 2), dtype='float32'))
    x_ = theano.tensor.TensorVariable(CudaNdarrayType([False] * 4))
    f = theano.function([x_], op(x_)[0])
    numpy.testing.assert_allclose(f(x), 0.0625)
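The expected constant is consistent with the cuda-convnet cross-map formula x_i / (1 + add_scale * sum_j x_j**2) ** pow_scale: with 16 channels of ones, size_f = 16, add_scale = 15/16 and pow_scale = 1, each output element is

1.0 / (1.0 + (15.0 / 16.0) * 16 * 1.0 ** 2) ** 1.0  # = 1/16 = 0.0625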