# Example #1
def join(small_image,
         big_image,
         n_filter_small,
         n_filter_big,
         big_img_size_in,
         ordering=''):
    """Merge a small feature map into a big one along the channel axis.

    The small image is upsampled 2x on both spatial axes (nearest-neighbour
    via ``repeat``), each branch is passed through its own batch
    normalization brick, and the two results are concatenated along the
    filter dimension (axis 1).

    Parameters mirror the two input tensors' filter counts; ``big_img_size_in``
    supplies the spatial (height, width) of the big image.  ``ordering`` is a
    suffix appended to the brick names to keep them unique.
    """
    # 2x nearest-neighbour upsampling: duplicate rows, then columns.
    small_up = tensor.repeat(
        tensor.repeat(small_image, 2, axis=2), 2, axis=3)

    height, width = big_img_size_in[0], big_img_size_in[1]

    bn_small = BatchNormalization((n_filter_small, height, width),
                                  name='bn_small%s' % (ordering, ))
    bn_small.initialize()
    bn_big = BatchNormalization((n_filter_big, height, width),
                                name='bn_big%s' % (ordering, ))
    bn_big.initialize()

    # Depth-concatenate the two normalized branches along the channel axis.
    return tensor.concatenate(
        [bn_small.apply(small_up), bn_big.apply(big_image)], axis=1)
# Example #2
def test_batch_normalization_broadcastable_sanity():
    """Minibatch-estimate variables of a broadcastable BatchNormalization
    brick must share the population statistics' broadcast pattern (ignoring
    the leading batch axis).
    """
    bn = BatchNormalization((5, 3, 2), broadcastable=(False, True, False))
    with bn:
        cg = ComputationGraph([bn.apply(tensor.tensor4('abc'))])
    # Renamed from `vars`, which shadowed the `vars` builtin.
    estimates = VariableFilter(roles=[BATCH_NORM_MINIBATCH_ESTIMATE])(cg)
    assert all(v.broadcastable[1:] == bn.population_mean.broadcastable
               for v in estimates)
# Example #3
def test_batch_normalization_broadcastable_sanity():
    """Check that every minibatch-estimate variable produced under a
    broadcastable BatchNormalization brick matches the broadcast pattern of
    the population mean (batch axis excluded).
    """
    bn = BatchNormalization((5, 3, 2), broadcastable=(False, True, False))
    with bn:
        cg = ComputationGraph([bn.apply(tensor.tensor4('abc'))])
    # `vars` shadowed the builtin of the same name; use a descriptive name.
    estimate_vars = VariableFilter(roles=[BATCH_NORM_MINIBATCH_ESTIMATE])(cg)
    assert all(v.broadcastable[1:] == bn.population_mean.broadcastable
               for v in estimate_vars)
# Example #4
def test_batch_normalization_simple():
    """BatchNormalization on a matrix should equal manual NumPy
    standardization of the minibatch (zero mean, unit variance, epsilon
    added to the variance for numerical stability).
    """
    eps = 1e-4
    x = tensor.matrix()
    brick = BatchNormalization(input_dim=4, epsilon=eps)
    brick.initialize()
    with batch_normalization(brick):
        y = brick.apply(x)
    rng = numpy.random.RandomState((2016, 1, 18))
    data = rng.uniform(size=(5, 4)).astype(theano.config.floatX)
    actual = y.eval({x: data})
    # Reference: per-feature standardization computed directly in NumPy.
    expected = (data - data.mean(axis=0)) / numpy.sqrt(data.var(axis=0) + eps)
    assert_allclose(actual, expected, rtol=1e-4)
# Example #5
def test_batch_normalization_simple():
    """Verify BatchNormalization against a hand-computed standardization:
    subtract the minibatch mean and divide by sqrt(var + epsilon),
    feature-wise.
    """
    epsilon = 1e-4
    inp = tensor.matrix()
    bn_brick = BatchNormalization(input_dim=4, epsilon=epsilon)
    bn_brick.initialize()
    with batch_normalization(bn_brick):
        out = bn_brick.apply(inp)
    batch = (numpy.random.RandomState((2016, 1, 18))
             .uniform(size=(5, 4))
             .astype(theano.config.floatX))
    computed = out.eval({inp: batch})
    mean = batch.mean(axis=0)
    std = numpy.sqrt(batch.var(axis=0) + epsilon)
    assert_allclose(computed, (batch - mean) / std, rtol=1e-4)