import numpy as np
import pytest

import caffe
from caffe.proto import caffe_pb2
# GradientChecker is assumed to live in the project's gradient-check
# utilities (as in tnarihi's caffe fork); adjust the import to your checkout.
from caffe.gradient_check_util import GradientChecker


def test_gradient_4_layer():
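    """Gradient4Layer: top has shape (N, 2, C, H, W), where channel 0 holds
    horizontal differences x[..., j] - x[..., j + 1] and channel 1 holds the
    vertical ones; the backward pass is gradient-checked."""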
    ba, c, h, w = (2, 3, 4, 4)
    b = caffe.Blob([ba, c, h, w])
    t = caffe.Blob([])
    bottom = [b]
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.vision_layers"
    lp.python_param.layer = "Gradient4Layer"
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    rng = np.random.RandomState(313)
    b.data[...] = rng.randn(*b.shape)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    assert top[0].shape == (ba, 2, c, h, w)
    assert np.all(
        top[0].data[:, 0, :, :, :-1] ==
        bottom[0].data[..., :, :-1] - bottom[0].data[..., :, 1:])
    assert np.all(
        top[0].data[:, 1, :, :-1, :] ==
        bottom[0].data[..., :-1, :] - bottom[0].data[..., 1:, :])
    checker = GradientChecker(1e-2, 1e-4)
    checker.check_gradient_exhaustive(
        layer, bottom, top)


def test_tile_layer():
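    """TileLayer: repeat the bottom `num` times along `axis`; the forward
    output must equal np.tile and the backward pass must pass the checker."""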
    ba, c, h, w = [2, 3, 3, 4]
    b, t = caffe.Blob([ba, c, h, w]), caffe.Blob([])
    bottom = [b]
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "TileLayer"
    axis = 1
    num = 5
    lp.python_param.param_str = str(
        {'axis': axis, 'num': num})
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    rng = np.random.RandomState(313)
    b.data[...] = rng.randn(*b.shape)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    assert t.shape == (ba, c * num, h, w)
    reps = [1 for _ in t.shape]
    reps[axis] = num
    assert np.all(np.tile(b.data, reps) == t.data)
    checker = GradientChecker(1e-2, 1e-5)
    checker.check_gradient_exhaustive(
        layer, bottom, top)


def test_sil2_loss_layer_backward(sil2_loss_layer, blob_4_2322_init):
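    """Gradient check for the SIL2 loss layer with (pred, label, mask)
    bottoms; only pred and label are checked, not the mask."""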
    layer = sil2_loss_layer
    pred, label, mask, top = blob_4_2322_init
    bottom = [pred, label, mask]
    top = [top]
    checker = GradientChecker(1e-2, 1e-2)
    checker.check_gradient_exhaustive(layer, bottom, top, check_bottom=[0, 1])


def test_crossent_layer(request):
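    """CrossEntropyLossLayer: the forward loss must equal the mean negative
    log-likelihood of the labeled class; gradients are checked for the
    predictions only."""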
    if request.config.getoption('caffe_cpu'):
        pytest.skip("CrossEntropyLossLayer requires GPU")
    pred = caffe.Blob((5, 8))
    label = caffe.Blob((5, 1))
    loss = caffe.Blob([])
    bottom = [pred, label]
    top = [loss]

    # Fill
    rng = np.random.RandomState(313)
    pred.data[...] = rng.rand(*pred.shape) + 0.1
    label.data[...] = rng.randint(0, 8, label.shape)
    pred.data[...] = pred.data / pred.data.sum(axis=1, keepdims=True)
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.loss_layers"
    lp.python_param.layer = "CrossEntropyLossLayer"
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    ref = -np.mean(
        np.log(
            np.maximum(np.finfo(np.float32).tiny, pred.data)
        ).reshape(pred.shape[0], -1)[
            # ravel() keeps the fancy index 1-D; a (N, 1) index would
            # broadcast against np.arange(N) and pick the wrong entries
            np.arange(pred.shape[0]), label.data.astype('int32').ravel()]
    )
    assert np.isclose(ref, loss.data)
    checker = GradientChecker(1e-3, 1e-2)
    checker.check_gradient_exhaustive(
        layer, bottom, top, check_bottom=[0])


def test_slice_by_array_layer(blob_4_2322, tmpdir):
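    """SliceByArrayLayer: gather the channels listed in a .mat file (here
    [2, 0]), so top == bottom[:, indexes, ...]."""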
    path_indexes = tmpdir.join('indexes.mat').strpath
    from scipy.io import savemat
    indexes = np.array([2, 0])
    savemat(path_indexes, {'indexes': indexes})
    b, _, _, t = blob_4_2322
    bottom = [b]
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "SliceByArrayLayer"
    lp.python_param.param_str = str({
        'path_mat': path_indexes,
        'key': 'indexes'
    })
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    rng = np.random.RandomState(313)
    b.data[...] = rng.randn(*b.shape)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    assert np.all(top[0].data == bottom[0].data[:, indexes, ...])
    checker = GradientChecker(1e-2, 1e-5)
    checker.check_gradient_exhaustive(layer, bottom, top)


def test_dssim_layer(request):
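    """Gradient check for DSSIMLayer (window size hsize=3) on a smooth
    synthetic image and a clipped noisy copy of it."""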
    if request.config.getoption('caffe_cpu'):
        pytest.skip("DSSIMLayer requires GPU")
    x, y = np.ogrid[:5, :5]
    img1 = np.sin(x / 5.0 * np.pi) * np.cos(y / 5.0 * np.pi)
    img1 = np.repeat(img1[..., np.newaxis], 3, 2)
    img1 = (img1 - img1.min()) / (img1.max() - img1.min())
    rng = np.random.RandomState(313)
    img2 = img1 + rng.randn(*img1.shape) * 0.2
    img2[img2 > 1] = 1
    img2[img2 < 0] = 0
    bottom = [caffe.Blob([]), caffe.Blob([])]
    top = [caffe.Blob([])]
    img1 = img1.transpose(2, 0, 1)
    img2 = img2.transpose(2, 0, 1)
    bottom[0].reshape(*((1,) + img1.shape))
    bottom[1].reshape(*((1,) + img2.shape))
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.loss_layers"
    lp.python_param.layer = "DSSIMLayer"
    lp.python_param.param_str = str({'hsize': 3})
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    bottom[0].data[...] = img1[np.newaxis]
    bottom[1].data[...] = img2[np.newaxis]
    checker = GradientChecker(1e-3, 1e-2)
    checker.check_gradient_exhaustive(
        layer, bottom, top, check_bottom=[0, 1])


def test_logit_loss_layer(request):
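    """LogitLossLayer on labels in {-1, 0, +1}: the loss must equal
    sum(|t| * log(1 + exp(-y * t))) / sum(|t|); zero labels carry no weight."""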
    if request.config.getoption('caffe_cpu'):
        pytest.skip("LogitLossLayer requires GPU")
    # data
    y = np.random.rand(2, 3, 4, 5)
    # labels in {-1, 0, +1}; np.float was removed in NumPy >= 1.24
    t = (np.random.rand(2, 3, 4, 5) > 0.5).astype(np.float32) \
        - (np.random.rand(2, 3, 4, 5) > 0.5).astype(np.float32)
    # setting up blobs
    bottom = [caffe.Blob([]), caffe.Blob([])]
    top = [caffe.Blob([])]
    bottom[0].reshape(*y.shape)
    bottom[1].reshape(*t.shape)
    bottom[0].data[...] = y
    bottom[1].data[...] = t
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.loss_layers"
    lp.python_param.layer = "LogitLossLayer"
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    # reference computation
    ref = np.sum(np.abs(t) * np.log(1 + np.exp(-y * t))) / np.sum(np.abs(t))
    assert np.isclose(top[0].data, ref)
    checker = GradientChecker(1e-3, 1e-2)
    checker.check_gradient_exhaustive(
        layer, bottom, top, check_bottom=[0])


    def test_euclidean(self):
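        """EuclideanLoss: the forward loss must equal
        sum((a - b) ** 2) / (2 * N); all bottoms are gradient-checked.
        (TestCase method: self.bottom and self.top come from setUp.)"""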
        lp = caffe_pb2.LayerParameter()
        lp.type = "EuclideanLoss"
        layer = caffe.create_layer(lp)
        layer.SetUp(self.bottom, self.top)
        layer.Reshape(self.bottom, self.top)
        layer.Forward(self.bottom, self.top)
        # manual computation
        loss = np.sum((self.bottom[0].data - self.bottom[1].data) ** 2) \
            / self.bottom[0].shape[0] / 2.0
        self.assertAlmostEqual(float(self.top[0].data), loss, 5)
        checker = GradientChecker(1e-2, 1e-2)
        checker.check_gradient_exhaustive(
            layer, self.bottom, self.top, check_bottom='all')


def test_reduction_layer_sum(blob_4_2322):
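    """ReductionLayer with op='sum': top == bottom.sum(axis=1, keepdims=True)."""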
    b, _, _, t = blob_4_2322
    bottom = [b]
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "ReductionLayer"
    lp.python_param.param_str = str({'axis': 1, 'op': 'sum'})
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    rng = np.random.RandomState(313)
    b.data[...] = rng.randn(*b.shape)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    assert np.all(b.data.sum(layer.axis_, keepdims=True) == t.data)
    checker = GradientChecker(1e-2, 1e-4)
    checker.check_gradient_exhaustive(layer, bottom, top)


def test_axpb_layer(blob_4_2322):
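    """AXPBLayer: elementwise a * x + b with scalars a and b from param_str."""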
    b, _, _, t = blob_4_2322
    bottom = [b]
    top = [t]
    # Create Layer
    va = 0.7
    vb = -0.3
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "AXPBLayer"
    lp.python_param.param_str = str({'a': va, 'b': vb})
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    rng = np.random.RandomState(313)
    b.data[...] = rng.randn(*b.shape)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    assert np.all(va * b.data + vb == t.data)
    checker = GradientChecker(1e-3, 1e-2)
    checker.check_gradient_exhaustive(layer, bottom, top)


    def test_inner_product(self):
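        """InnerProduct: top == x.reshape(N, -1).dot(W.T) + b, with Caffe's
        (num_output, dim) weight layout. (TestCase method: self.bottom,
        self.top and self.rng come from setUp.)"""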
        lp = caffe_pb2.LayerParameter()
        lp.type = "InnerProduct"
        lp.inner_product_param.num_output = 3
        layer = caffe.create_layer(lp)
        layer.SetUp([self.bottom[0]], self.top)
        w = self.rng.randn(*layer.blobs[0].shape)
        b = self.rng.randn(*layer.blobs[1].shape)
        layer.blobs[0].data[...] = w
        layer.blobs[1].data[...] = b
        layer.Reshape([self.bottom[0]], self.top)
        layer.Forward([self.bottom[0]], self.top)
        assert np.allclose(
            self.top[0].data,
            np.dot(
                self.bottom[0].data.reshape(self.bottom[0].shape[0], -1), w.T
            ) + b
        )
        checker = GradientChecker(1e-2, 1e-1)
        checker.check_gradient_exhaustive(
            layer, [self.bottom[0]], self.top, check_bottom=[0])


def test_parameter_layer():
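    """ParameterLayer: no bottoms; the layer owns one parameter blob, filled
    by the `filler` expression, and copies it to the top on every forward."""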
    t = caffe.Blob([])
    bottom = []
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "ParameterLayer"
    lp.python_param.param_str = str(
        dict(shape=(2, 3, 2, 2),
             filler="lambda shape, rng: rng.randn(*shape) * 0.01"))
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    assert len(layer.blobs) == 1
    assert layer.blobs[0].shape == (2, 3, 2, 2)
    param_copy = layer.blobs[0].data.copy()
    layer.Forward(bottom, top)
    assert np.allclose(top[0].data, param_copy)
    checker = GradientChecker(1e-3, 1e-5)
    checker.check_gradient_exhaustive(layer, bottom, top)


def test_broadcast_layer():
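    """BroadcastLayer: repeat a singleton axis (here axis 1) `num` times, so
    every slice of the top along that axis equals the bottom."""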
    ba, c, h, w = [2, 1, 3, 4]
    b, t = caffe.Blob([ba, c, h, w]), caffe.Blob([])
    bottom = [b]
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "BroadcastLayer"
    lp.python_param.param_str = str({'axis': 1, 'num': 3})
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    rng = np.random.RandomState(313)
    b.data[...] = rng.randn(*b.shape)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    assert t.shape == (ba, 3, h, w)
    for i in range(3):
        assert np.all(b.data == t.data[:, i:i + 1])
    checker = GradientChecker(1e-2, 1e-5)
    checker.check_gradient_exhaustive(layer, bottom, top)


def test_lp_normalization_layer(blob_4_2322, lpnorm_params):
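    """LpNormalizationLayer: divide by the Lp norm over `axis` (all non-batch
    axes when axis is None)."""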
    axis, p = lpnorm_params
    b, _, _, t = blob_4_2322
    bottom = [b]
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "LpNormalizationLayer"
    lp.python_param.param_str = str({'axis': axis, 'p': p})
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    rng = np.random.RandomState(313)
    b.data[...] = rng.rand(*b.shape)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    if axis is None:
        axis = tuple(range(1, len(bottom[0].shape)))
    test_top = b.data / ((b.data**p).sum(axis, keepdims=True)**(1. / p))
    assert np.allclose(test_top, t.data)
    checker = GradientChecker(1e-3, 1e-2)
    checker.check_gradient_exhaustive(layer, bottom, top)


def test_matrix_mult_layer_backward(matrix_mult_layer):
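    """Gradient check for MatrixMultLayer; layer and blobs come from the
    matrix_mult_layer fixture."""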
    layer, bottom, top, _ = matrix_mult_layer
    checker = GradientChecker(1e-3, 1e-2)
    checker.check_gradient_exhaustive(layer, bottom, top)