Code example #1
# Assumed imports and helper definitions for this snippet (not shown in the
# original example; paths follow Theano's old CUDA backend and pylearn2):
import numpy
import theano
from theano.sandbox.cuda import CudaNdarray, CudaNdarrayType
from theano.sandbox.cuda.basic_ops import gpu_contiguous
from pylearn2.sandbox.cuda_convnet.response_norm import CrossMapNorm

# Symbolic 4D CUDA tensor constructor and a GPU-enabled compilation mode,
# defined here the way Theano GPU test suites typically do it.
cuda_ftensor4 = CudaNdarrayType(broadcastable=(False,) * 4)
mode_with_gpu = theano.compile.mode.get_default_mode().including('gpu')

def test_cross_map_norm_noncontiguous_grad():
    # Check the case reported at https://groups.google.com/d/topic/pylearn-users/KxIYc3hczf4/discussion
    x = cuda_ftensor4('x')
    # Shuffle bc01 into cuda-convnet's c01b layout; the view is non-contiguous,
    # so gpu_contiguous forces a C-contiguous copy on the GPU.
    x_shuffled = x.dimshuffle(1, 2, 3, 0)
    x_shuffled = gpu_contiguous(x_shuffled)
    response_norm = CrossMapNorm(size_f=16,
                                 add_scale=(15. / 16.),
                                 pow_scale=1,
                                 blocked=True)
    output_shuffled = response_norm(x_shuffled)[0]
    # Shuffle the normalized output back to bc01 before summing.
    output = output_shuffled.dimshuffle(3, 0, 1, 2)
    cost = output.sum()
    cost.name = 'cost'
    grad_x = theano.grad(cost, x)
    f = theano.function([x], grad_x, mode=mode_with_gpu)
    x_val = CudaNdarray(numpy.ones((2, 16, 2, 2), dtype='float32'))
    f(x_val)
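A side note on the axis shuffling above: dimshuffle(1, 2, 3, 0) turns Theano's usual bc01 layout (batch, channel, row, col) into the channel-first c01b layout used by the cuda-convnet kernels, but the result is only a strided view, which is why gpu_contiguous is applied to force an actual contiguous copy on the GPU. A minimal NumPy sketch of the same effect (the shapes mirror the test; this snippet is illustrative and not part of the original example):

import numpy

a = numpy.ones((2, 16, 2, 2), dtype='float32')  # bc01: (batch, channel, row, col)
b = a.transpose(1, 2, 3, 0)                     # same data viewed as c01b: (channel, row, col, batch)
print(b.flags['C_CONTIGUOUS'])                  # False -- a transposed view is not C-contiguous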
Code example #2
# Assumed imports for this snippet (not shown in the original example):
import numpy
import theano
from theano.sandbox.cuda import CudaNdarray, CudaNdarrayType
from pylearn2.sandbox.cuda_convnet.response_norm import CrossMapNorm

def test_cross_map_norm_simple():
    # CrossMapNorm(size_f=16, add_scale=15/16, pow_scale=1, blocked=True) on an all-ones input.
    op = CrossMapNorm(16, 15. / 16., 1., True)
    x = CudaNdarray(numpy.ones((16, 2, 2, 2), dtype='float32'))
    x_ = theano.tensor.TensorVariable(CudaNdarrayType([False] * 4))
    f = theano.function([x_], op(x_)[0])
    numpy.testing.assert_allclose(f(x), 0.0625)
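A quick check of the expected constant in example #2, assuming the cuda-convnet cross-map normalization formula out_i = x_i / (1 + add_scale * sum_j x_j**2) ** pow_scale over a window of size_f feature maps: with size_f=16, add_scale=15/16, pow_scale=1 and an all-ones input, each denominator is 1 + (15/16) * 16 = 16, so every output entry is 1/16 = 0.0625, the value the assert_allclose above targets. The arithmetic, spelled out (illustrative only):

# Hand computation of the expected output for an all-ones input, assuming the
# cuda-convnet formula out = x / (1 + add_scale * sum(x_j ** 2)) ** pow_scale.
size_f, add_scale, pow_scale = 16, 15. / 16., 1.
denom = (1.0 + add_scale * size_f * 1.0 ** 2) ** pow_scale  # = 16.0
print(1.0 / denom)  # 0.0625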